diff --git a/.formatter.exs b/.formatter.exs index 87be682..6f7cad5 100644 --- a/.formatter.exs +++ b/.formatter.exs @@ -1,5 +1,4 @@ # Used by "mix format" [ - inputs: [".formatter.exs", "mix.exs", "{config,lib,test}/**/*.{ex,exs}"], - line_length: 80 + inputs: [".formatter.exs", "mix.exs", "{config,lib,test}/**/*.{ex,exs}"] ] diff --git a/.gitignore b/.gitignore index 3172ef6..70ed94d 100644 --- a/.gitignore +++ b/.gitignore @@ -135,3 +135,4 @@ $RECYCLE.BIN/ # End of https://www.gitignore.io/api/vim,linux,emacs,elixir,windows,visualstudiocode +tmp \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 0bb8b49..a2bc49c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,15 +2,18 @@ sudo: required language: elixir elixir: - - 1.6.5 + - 1.8 +otp_release: + - 20.0 services: - docker install: - - docker-compose build + - docker-compose build mssql_ecto script: + - docker-compose run mssql_ecto mix format --check-formatted - docker-compose run mssql_ecto mix compile --warnings-as-errors - docker-compose run mssql_ecto mix coveralls.travis diff --git a/Dockerfile b/Dockerfile index ae3597b..d20b24a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM elixir:1.6.5-slim +FROM elixir:1.8.2-slim ENV DEBIAN_FRONTEND noninteractive @@ -19,7 +19,7 @@ ENV LC_ALL en_US.UTF-8 # --- MSSQL ODBC INSTALL --- RUN apt-get update && \ - apt-get -y install curl apt-transport-https gnupg2 && \ + apt-get -y install git curl apt-transport-https gnupg2 && \ curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \ curl https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list && \ apt-get update && \ @@ -36,4 +36,4 @@ RUN mix do deps.get, deps.compile # --- Be able to run wait for it script --- -RUN chmod +x /usr/src/app/wait-for-it.sh +RUN chmod +x /usr/src/app/bash_scripts/wait-for-it.sh diff --git a/LICENSE b/LICENSE index f4a8889..7a4a3ea 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,202 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, -and distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by -the copyright owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all -other entities that control, are controlled by, or are under common -control with that entity. For the purposes of this definition, -"control" means (i) the power, direct or indirect, to cause the -direction or management of such entity, whether by contract or -otherwise, or (ii) ownership of fifty percent (50%) or more of the -outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity -exercising permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, -including but not limited to software source code, documentation -source, and configuration files. - -"Object" form shall mean any form resulting from mechanical -transformation or translation of a Source form, including but -not limited to compiled object code, generated documentation, -and conversions to other media types. 
- -"Work" shall mean the work of authorship, whether in Source or -Object form, made available under the License, as indicated by a -copyright notice that is included in or attached to the work -(an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object -form, that is based on (or derived from) the Work and for which the -editorial revisions, annotations, elaborations, or other modifications -represent, as a whole, an original work of authorship. For the purposes -of this License, Derivative Works shall not include works that remain -separable from, or merely link (or bind by name) to the interfaces of, -the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including -the original version of the Work and any modifications or additions -to that Work or Derivative Works thereof, that is intentionally -submitted to Licensor for inclusion in the Work by the copyright owner -or by an individual or Legal Entity authorized to submit on behalf of -the copyright owner. For the purposes of this definition, "submitted" -means any form of electronic, verbal, or written communication sent -to the Licensor or its representatives, including but not limited to -communication on electronic mailing lists, source code control systems, -and issue tracking systems that are managed by, or on behalf of, the -Licensor for the purpose of discussing and improving the Work, but -excluding communication that is conspicuously marked or otherwise -designated in writing by the copyright owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity -on behalf of whom a Contribution has been received by Licensor and -subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of -this License, each Contributor hereby grants to You a perpetual, -worldwide, non-exclusive, no-charge, royalty-free, irrevocable -copyright license to reproduce, prepare Derivative Works of, -publicly display, publicly perform, sublicense, and distribute the -Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of -this License, each Contributor hereby grants to You a perpetual, -worldwide, non-exclusive, no-charge, royalty-free, irrevocable -(except as stated in this section) patent license to make, have made, -use, offer to sell, sell, import, and otherwise transfer the Work, -where such license applies only to those patent claims licensable -by such Contributor that are necessarily infringed by their -Contribution(s) alone or by combination of their Contribution(s) -with the Work to which such Contribution(s) was submitted. If You -institute patent litigation against any entity (including a -cross-claim or counterclaim in a lawsuit) alleging that the Work -or a Contribution incorporated within the Work constitutes direct -or contributory patent infringement, then any patent licenses -granted to You under this License for that Work shall terminate -as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the -Work or Derivative Works thereof in any medium, with or without -modifications, and in Source or Object form, provided that You -meet the following conditions: - -(a) You must give any other recipients of the Work or -Derivative Works a copy of this License; and - -(b) You must cause any modified files to carry prominent notices -stating that You changed the files; and - -(c) You must retain, in the Source form of any Derivative Works -that You distribute, all copyright, patent, trademark, and -attribution notices from the Source form of the Work, -excluding those notices that do not pertain to any part of -the Derivative Works; and - -(d) If the Work includes a "NOTICE" text file as part of its -distribution, then any Derivative Works that You distribute must -include a readable copy of the attribution notices contained -within such NOTICE file, excluding those notices that do not -pertain to any part of the Derivative Works, in at least one -of the following places: within a NOTICE text file distributed -as part of the Derivative Works; within the Source form or -documentation, if provided along with the Derivative Works; or, -within a display generated by the Derivative Works, if and -wherever such third-party notices normally appear. The contents -of the NOTICE file are for informational purposes only and -do not modify the License. You may add Your own attribution -notices within Derivative Works that You distribute, alongside -or as an addendum to the NOTICE text from the Work, provided -that such additional attribution notices cannot be construed -as modifying the License. - -You may add Your own copyright statement to Your modifications and -may provide additional or different license terms and conditions -for use, reproduction, or distribution of Your modifications, or -for any such Derivative Works as a whole, provided Your use, -reproduction, and distribution of the Work otherwise complies with -the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, -any Contribution intentionally submitted for inclusion in the Work -by You to the Licensor shall be under the terms and conditions of -this License, without any additional terms or conditions. -Notwithstanding the above, nothing herein shall supersede or modify -the terms of any separate license agreement you may have executed -with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade -names, trademarks, service marks, or product names of the Licensor, -except as required for reasonable and customary use in describing the -origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or -agreed to in writing, Licensor provides the Work (and each -Contributor provides its Contributions) on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -implied, including, without limitation, any warranties or conditions -of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A -PARTICULAR PURPOSE. You are solely responsible for determining the -appropriateness of using or redistributing the Work and assume any -risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, -whether in tort (including negligence), contract, or otherwise, -unless required by applicable law (such as deliberate and grossly -negligent acts) or agreed to in writing, shall any Contributor be -liable to You for damages, including any direct, indirect, special, -incidental, or consequential damages of any character arising as a -result of this License or out of the use or inability to use the -Work (including but not limited to damages for loss of goodwill, -work stoppage, computer failure or malfunction, or any and all -other commercial damages or losses), even if such Contributor -has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing -the Work or Derivative Works thereof, You may choose to offer, -and charge a fee for, acceptance of support, warranty, indemnity, -or other liability obligations and/or rights consistent with this -License. However, in accepting such obligations, You may act only -on Your own behalf and on Your sole responsibility, not on behalf -of any other Contributor, and only if You agree to indemnify, -defend, and hold each Contributor harmless for any liability -incurred by, or claims asserted against, such Contributor by reason -of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - -To apply the Apache License to your work, attach the following -boilerplate notice, with the fields enclosed by brackets "{}" -replaced with your own identifying information. (Don't include -the brackets!) The text should be enclosed in the appropriate -comment syntax for the file format. We also recommend that a -file or class name and description of purpose be included on the -same "printed page" as the copyright notice for easier -identification within third-party archives. - -Copyright 2017 Findmypast - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file
diff --git a/README.md b/README.md
index 96ce967..91996ca 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
[![Inline docs](http://inch-ci.org/github/findmypast-oss/mssql_ecto.svg?branch=master)](http://inch-ci.org/github/findmypast-oss/mssql_ecto)
[![Ebert](https://ebertapp.io/github/findmypast-oss/mssql_ecto.svg)](https://ebertapp.io/github/findmypast-oss/mssql_ecto)
[![Hex.pm](https://img.shields.io/hexpm/v/mssql_ecto.svg)](https://hex.pm/packages/mssql_ecto)
-[![LICENSE](https://img.shields.io/hexpm/l/mssql_ecto.svg)](https://github.com/findmypast-oss/mssql_ecto/blob/master/LICENSE)
+[![LICENSE](https://img.shields.io/hexpm/l/mssql_ecto.svg)](https://github.com/findmypast-oss/mssql_ecto/blob/master/docs/LICENSE)

[Ecto](https://github.com/elixir-ecto/ecto) Adapter for [Mssqlex](https://github.com/findmypast-oss/mssqlex)

@@ -29,52 +29,13 @@
or
[other platforms](https://docs.microsoft.com/en-us/sql/connect/odbc/microsoft-odbc-driver-for-sql-server)
on the official site.

-### Hex
+### Mix

-#### With [Application Inference](https://elixir-lang.org/blog/2017/01/05/elixir-v1-4-0-released/#application-inference)
-
-If you are using
-[application inference](https://elixir-lang.org/blog/2017/01/05/elixir-v1-4-0-released/#application-inference),
-i.e. `application` in your `mix.exs` looks something like this:
-
-```elixir
-def application do
-  [extra_applications: [:logger]]
-end
-```
-
-Note, the lack of `:applications` key. Then, you just need to add the following
-dependencies:
+Add the following to your mix file:

```elixir
def deps do
-  [{:mssql_ecto, "~> 1.2.0"},
-   {:mssqlex, "~> 1.1.0"}]
-end
-```
-
-#### Without [Application Inference](https://elixir-lang.org/blog/2017/01/05/elixir-v1-4-0-released/#application-inference)
-
-If you are explicitly calling out all running applications under `application`
-in your `mix.exs`, i.e. it looks something like this:
-
-```elixir
-def application do
-  [applications: [:logger, :plug, :postgrex]]
-end
-```
-
-Then, you need to add `mssql_ecto` and `mssqlex` to both your `deps` and list of
-running applications:
-
-```elixir
-def application do
-  [applications: [:logger, :plug, :mssqlex, :mssql_ecto]]
-end
-
-def deps do
-  [{:mssql_ecto, "~> 1.2.0"},
-   {:mssqlex, "~> 1.1.0"}]
+  [{:mssql_ecto, "~> 2.0.0-beta.0"}]
end
```

@@ -97,30 +58,32 @@ config :my_app, MyApp.Repo,
An example project using mssql_ecto with Docker has kindly been created by
[Chase Pursley](https://github.com/cpursley). It can be viewed
-[here](https://github.com/cpursley/mssql_ecto_friends).
+[here](https://github.com/whossname/mssql_ecto_friends).
## Type Mappings

-| Ecto Type | SQL Server Type | Caveats |
-| :-------------: | :------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------: |
-| :id | int | |
-| :serial | int identity(1, 1) | |
-| :bigserial | bigint identity(1,1) | When a query is returning this value with the `returning` syntax and no schema is used, it will be returned as a string rather than an integer value |
-| :binary_id | char(36) | |
-| :uuid | char(36) | |
-| :string | nvarchar | |
-| :binary | nvarchar(4000) | Limited size, not fully implemented |
-| :integer | int | |
-| :boolean | bit | |
-| {:array, type} | list of type | Not Supported |
-| :map | nvarchar(4000) | Not Supported |
-| {:map, \_} | nvarchar(4000) | Not Supported |
-| :date | date | |
-| :time | time | Can write but can't read |
-| :utc_datetime | datetime2 | |
-| :naive_datetime | datetime2 | |
-| :float | float | |
-| :decimal | decimal | |
+### Needs testing/validation
+
+| Ecto Type | SQL Server Type | Caveats |
+| :-------------: | :------------------: | :---------------------------------: |
+| :id | int | |
+| :serial | int identity(1, 1) | |
+| :bigserial | bigint identity(1,1) | |
+| :binary_id | char(36) | |
+| :uuid | char(36) | |
+| :string | nvarchar | |
+| :binary | nvarchar(4000) | Limited size, not fully implemented |
+| :integer | int | |
+| :boolean | bit | |
+| {:array, type} | list of type | Not Supported |
+| :map | nvarchar(4000) | Not Supported |
+| {:map, \_} | nvarchar(4000) | Not Supported |
+| :date | date | |
+| :time | time | Can write but can't read |
+| :utc_datetime | datetime2 | |
+| :naive_datetime | datetime2 | |
+| :float | float | |
+| :decimal | decimal | |

## Features not yet implemented

@@ -129,11 +92,15 @@ An example project using mssql_ecto with Docker has kindly been created by
- On conflict
- Upserts

+## Known Issues
+
+See the list of [known issues](https://github.com/findmypast-oss/mssqlex#known-issues).
+
## Contributing

-### Integration Test Setup
+### Test Setup

-Running the integration tests requires an instance of SQL Server running on
+Running the tests requires an instance of SQL Server running on
`localhost` and certain configuration variables set as environment variables:

- MSSQL_DVR should be set to the ODBC driver to be used. Usually

The tests will create a database named `mssql_ecto_integration_test`

+The script `/bash_scripts/setup_test_db.sh` starts a Docker container that
+hosts the test database.
+
### Code of Conduct

This project has a
-[Code of Conduct](https://github.com/findmypast-oss/mssql_ecto/blob/master/CODE_OF_CONDUCT.md)
+[Code of Conduct](https://github.com/findmypast-oss/mssql_ecto/blob/master/docs/CODE_OF_CONDUCT.md)
if you wish to contribute to this project, please abide by its rules.
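Taken together, the test setup above amounts to a short shell session. The following is an illustrative sketch that assumes the defaults introduced in this change's `bash_scripts/setup_test_db.sh` and `docker-compose.yml` (user `sa`, password `ThePa$$word`, port `1433`); adjust the values to match your own SQL Server instance:

```bash
# Illustrative local test run; values mirror this change's defaults.
./bash_scripts/setup_test_db.sh    # start the test_mssql_server container

export MSSQL_UID=sa
export MSSQL_PWD='ThePa$$word'
export MSSQL_HST=localhost         # container publishes port 1433 on localhost
export MSSQL_PRT=1433
export MSSQL_IN=MSSQLSERVER        # instance name, as in docker-compose.yml
export MSSQL_DVR='{ODBC Driver 17 for SQL Server}'

mix test                           # the same command docker-compose runs
```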
diff --git a/bash_scripts/setup_test_db.sh b/bash_scripts/setup_test_db.sh new file mode 100755 index 0000000..95f43d3 --- /dev/null +++ b/bash_scripts/setup_test_db.sh @@ -0,0 +1,6 @@ +export MSSQL_UID=sa +export MSSQL_PWD='ThePa$$word' +docker stop test_mssql_server +docker rm test_mssql_server +docker run --name test_mssql_server -e 'ACCEPT_EULA=Y' -e SA_PASSWORD=$MSSQL_PWD -p 1433:1433 -d microsoft/mssql-server-linux +echo 'Created docker container test_mssql_server' diff --git a/bash_scripts/test_db_cli.sh b/bash_scripts/test_db_cli.sh new file mode 100755 index 0000000..4a4f21b --- /dev/null +++ b/bash_scripts/test_db_cli.sh @@ -0,0 +1 @@ +sudo docker exec -it test_mssql_server /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P 'ThePa$$word' diff --git a/wait-for-it.sh b/bash_scripts/wait-for-it.sh similarity index 100% rename from wait-for-it.sh rename to bash_scripts/wait-for-it.sh diff --git a/docker-compose.yml b/docker-compose.yml index 4a0d64c..f6bc93d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,24 +4,25 @@ services: image: microsoft/mssql-server-linux environment: - ACCEPT_EULA=Y - - SA_PASSWORD=TestPa$$word123 - - MSSQL_TCP_PORT=9204 + - SA_PASSWORD=ThePa$$word + - MSSQL_TCP_PORT=1433 mssql_ecto: build: . environment: - MIX_ENV=test - MSSQL_UID=sa - - MSSQL_PWD=TestPa$$word123 + - MSSQL_PWD=ThePa$$word - MSSQL_HST=sql_server - MSSQL_IN=MSSQLSERVER - - MSSQL_PRT=9204 + - MSSQL_PRT=1433 - MSSQL_DVR={ODBC Driver 17 for SQL Server} - TRAVIS_JOB_ID=$TRAVIS_JOB_ID + depends_on: + - sql_server volumes: - .:/usr/src/app/ - /usr/src/app/deps - /usr/src/app/_build - entrypoint: ./wait-for-it.sh sql_server:9204 -- + entrypoint: ./bash_scripts/wait-for-it.sh sql_server:1433 -- command: mix test - diff --git a/CHANGELOG.md b/docs/CHANGELOG.md similarity index 100% rename from CHANGELOG.md rename to docs/CHANGELOG.md diff --git a/CODE_OF_CONDUCT.md b/docs/CODE_OF_CONDUCT.md similarity index 100% rename from CODE_OF_CONDUCT.md rename to docs/CODE_OF_CONDUCT.md diff --git a/ISSUE_TEMPLATE.md b/docs/ISSUE_TEMPLATE.md similarity index 100% rename from ISSUE_TEMPLATE.md rename to docs/ISSUE_TEMPLATE.md diff --git a/docs/LICENSE b/docs/LICENSE new file mode 100644 index 0000000..f4a8889 --- /dev/null +++ b/docs/LICENSE @@ -0,0 +1,201 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files. 
+ +"Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions: + +(a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and + +(b) You must cause any modified files to carry prominent notices +stating that You changed the files; and + +(c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and + +(d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License. + +You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise,
+unless required by applicable law (such as deliberate and grossly
+negligent acts) or agreed to in writing, shall any Contributor be
+liable to You for damages, including any direct, indirect, special,
+incidental, or consequential damages of any character arising as a
+result of this License or out of the use or inability to use the
+Work (including but not limited to damages for loss of goodwill,
+work stoppage, computer failure or malfunction, or any and all
+other commercial damages or losses), even if such Contributor
+has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+the Work or Derivative Works thereof, You may choose to offer,
+and charge a fee for, acceptance of support, warranty, indemnity,
+or other liability obligations and/or rights consistent with this
+License. However, in accepting such obligations, You may act only
+on Your own behalf and on Your sole responsibility, not on behalf
+of any other Contributor, and only if You agree to indemnify,
+defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason
+of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following
+boilerplate notice, with the fields enclosed by brackets "{}"
+replaced with your own identifying information. (Don't include
+the brackets!) The text should be enclosed in the appropriate
+comment syntax for the file format. We also recommend that a
+file or class name and description of purpose be included on the
+same "printed page" as the copyright notice for easier
+identification within third-party archives.
+
+Copyright 2017 Findmypast
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/PULL_REQUEST_TEMPLATE.md b/docs/PULL_REQUEST_TEMPLATE.md
similarity index 100%
rename from PULL_REQUEST_TEMPLATE.md
rename to docs/PULL_REQUEST_TEMPLATE.md
diff --git a/integration/mssql/all_ecto_sql_test.exs b/integration/mssql/all_ecto_sql_test.exs
new file mode 100644
index 0000000..da9b6a1
--- /dev/null
+++ b/integration/mssql/all_ecto_sql_test.exs
@@ -0,0 +1,34 @@
+ecto_sql = Mix.Project.deps_paths()[:ecto_sql]
+ecto_sql = "#{ecto_sql}/integration_test/sql"
+
+Code.require_file("logging.exs", ecto_sql)
+Code.require_file("sandbox.exs", ecto_sql)
+Code.require_file("sql.exs", ecto_sql)
+
+"""
+
+# Partial Support
+
+> Code.require_file("subquery.exs", ecto_sql)
+One test fails due to the MSSQL ORDER BY clause.
+
+> Code.require_file("alter.exs", ecto_sql)
+Invalid character value for cast specification
+
+> Code.require_file("migration.exs", ecto_sql)
+Most tests pass. Of the three failing tests, two seem to be due to MSSQL-specific behaviour.
+
+
+# No Support
+These tests fail because of the "No SQL-driver information available." error.
+I think it is because the lock isn't implemented properly.
+It could also be a type issue.
+
+> Code.require_file("transaction.exs", ecto_sql)
+> Code.require_file("lock.exs", ecto_sql)
+> Code.require_file("migrator.exs", ecto_sql)
+
+
+# Not Implemented
+> Code.require_file("stream.exs", ecto_sql)
+"""
diff --git a/integration/mssql/all_ecto_test.exs b/integration/mssql/all_ecto_test.exs
new file mode 100644
index 0000000..feec8b9
--- /dev/null
+++ b/integration/mssql/all_ecto_test.exs
@@ -0,0 +1,43 @@
+ecto = Mix.Project.deps_paths()[:ecto]
+ecto = "#{ecto}/integration_test/cases"
+
+"""
+
+# Partial Support
+
+> Code.require_file("assoc.exs", ecto)
+Most tests pass.
+
+* Throws "No SQL-driver information available." error instead of expected errors.
+* Unique constraint
+* cascading delete doesn't work
+* many-to-many doesn't return duplicates
+
+> Code.require_file("joins.exs", ecto)
+2 tests fail.
+
+* Incorrect syntax near the keyword 'ON'
+* Unique constraint
+
+
+> Code.require_file("preload.exs", ecto)
+Most tests pass. Needs investigation.
+
+* Unique constraint
+* Incorrect syntax 'COUNT'
+* Incorrect preload behaviour
+
+> Code.require_file("repo.exs", ecto)
+Most tests pass. Needs investigation.
+
+# Needs investigation
+
+> Code.require_file("type.exs", ecto)
+> Code.require_file("windows.exs", ecto)
+
+
+# Not Implemented
+> Code.require_file("interval.exs", ecto)
+The complex datetime arithmetic is not implemented.
+
+"""
diff --git a/integration/mssql/all_test.exs b/integration/mssql/all_test.exs
deleted file mode 100644
index 66135df..0000000
--- a/integration/mssql/all_test.exs
+++ /dev/null
@@ -1,19 +0,0 @@
-# Full Support
-Code.require_file("./sql/alter.exs", __DIR__)
-Code.require_file("./sql/migration.exs", __DIR__)
-Code.require_file("./sql/sandbox.exs", __DIR__)
-Code.require_file("./sql/sql.exs", __DIR__)
-Code.require_file("./sql/subquery.exs", __DIR__)
-
-Code.require_file("./cases/assoc.exs", __DIR__)
-Code.require_file("./cases/interval.exs", __DIR__)
-Code.require_file("./cases/joins.exs", __DIR__)
-Code.require_file("./cases/migrator.exs", __DIR__)
-Code.require_file("./cases/preload.exs", __DIR__)
-Code.require_file("./cases/type.exs", __DIR__)
-Code.require_file("./cases/repo.exs", __DIR__)
-
-# Partial / No Support
-# Code.require_file "./sql/lock.exs", __DIR__
-# Code.require_file "./sql/stream.exs", __DIR__
-# Code.require_file "./sql/transaction.exs", __DIR__
diff --git a/integration/mssql/cases/interval.exs b/integration/mssql/cases/interval.exs
deleted file mode 100644
index 0d0eb7a..0000000
--- a/integration/mssql/cases/interval.exs
+++ /dev/null
@@ -1,757 +0,0 @@
-defmodule Ecto.Integration.IntervalTest do
- use Ecto.Integration.Case,
- async: Application.get_env(:ecto, :async_integration_tests, true)
-
- alias Ecto.Integration.Post
- alias Ecto.Integration.TestRepo
- import Ecto.Query
-
- @posted ~D[2014-01-01]
- @inserted_at ~N[2014-01-01 02:00:00.0]
-
- setup do
- TestRepo.insert!(%Post{posted: @posted, inserted_at: @inserted_at})
- :ok
- end
-
- test "date_add with year" do
- dec = Decimal.new(1)
-
- assert ["2015-01-01"] =
- TestRepo.all(
- from(p in Post, select: date_add(p.posted, 1, "year"))
- )
-
- assert ["2015-01-01"] =
- TestRepo.all(
- from(p in Post, select: date_add(p.posted, 1.0, "year"))
- )
-
- assert ["2015-01-01"] =
- TestRepo.all(
- from(p in Post, select: date_add(p.posted, ^1, "year"))
- )
-
- assert ["2015-01-01"] =
- TestRepo.all(
- from(p in Post, select: date_add(p.posted, ^1.0, "year"))
- )
-
- assert ["2015-01-01"] =
-
TestRepo.all( - from(p in Post, select: date_add(p.posted, ^dec, "year")) - ) - end - - test "date_add with month" do - dec = Decimal.new(3) - - assert ["2014-04-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, 3, "month")) - ) - - assert ["2014-04-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, 3.0, "month")) - ) - - assert ["2014-04-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^3, "month")) - ) - - assert ["2014-04-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^3.0, "month")) - ) - - assert ["2014-04-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^dec, "month")) - ) - end - - test "date_add with week" do - dec = Decimal.new(3) - - assert ["2014-01-22"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, 3, "week")) - ) - - assert ["2014-01-22"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, 3.0, "week")) - ) - - assert ["2014-01-22"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^3, "week")) - ) - - assert ["2014-01-22"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^3.0, "week")) - ) - - assert ["2014-01-22"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^dec, "week")) - ) - end - - test "date_add with day" do - dec = Decimal.new(5) - - assert ["2014-01-06"] = - TestRepo.all(from(p in Post, select: date_add(p.posted, 5, "day"))) - - assert ["2014-01-06"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, 5.0, "day")) - ) - - assert ["2014-01-06"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^5, "day")) - ) - - assert ["2014-01-06"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^5.0, "day")) - ) - - assert ["2014-01-06"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^dec, "day")) - ) - end - - @tag :not_supported_by_sql_server - test "date_add with hour" do - dec = Decimal.new(48) - - assert ["2014-01-03"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, 48, "hour")) - ) - - assert ["2014-01-03"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, 48.0, "hour")) - ) - - assert ["2014-01-03"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^48, "hour")) - ) - - assert ["2014-01-03"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^48.0, "hour")) - ) - - assert ["2014-01-03"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^dec, "hour")) - ) - end - - test "date_add with dynamic" do - posted = @posted - - assert ["2015-01-01"] = - TestRepo.all( - from(p in Post, select: date_add(^posted, ^1, ^"year")) - ) - - assert ["2014-04-01"] = - TestRepo.all( - from(p in Post, select: date_add(^posted, ^3, ^"month")) - ) - - assert ["2014-01-22"] = - TestRepo.all( - from(p in Post, select: date_add(^posted, ^3, ^"week")) - ) - - assert ["2014-01-06"] = - TestRepo.all( - from(p in Post, select: date_add(^posted, ^5, ^"day")) - ) - - assert ["2014-01-03"] = - TestRepo.all( - from(p in Post, select: date_add(^posted, ^48, ^"hour")) - ) - end - - test "date_add with negative interval" do - dec = Decimal.new(-1) - - assert ["2013-01-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, -1, "year")) - ) - - assert ["2013-01-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, -1.0, "year")) - ) - - assert ["2013-01-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^(-1), "year")) - ) - - assert ["2013-01-01"] = - TestRepo.all( - from(p in Post, 
select: date_add(p.posted, ^(-1.0), "year")) - ) - - assert ["2013-01-01"] = - TestRepo.all( - from(p in Post, select: date_add(p.posted, ^dec, "year")) - ) - end - - test "datetime_add with year" do - dec = Decimal.new(1) - - assert [{{2015, 1, 1}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 1, "year")) - ) - - assert [{{2015, 1, 1}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 1.0, "year")) - ) - - assert [{{2015, 1, 1}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, ^1, "year")) - ) - - assert [{{2015, 1, 1}, _}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, ^1.0, "year") - ) - ) - - assert [{{2015, 1, 1}, _}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, ^dec, "year") - ) - ) - end - - test "datetime_add with month" do - dec = Decimal.new(3) - - assert [{{2014, 4, 1}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 3, "month")) - ) - - assert [{{2014, 4, 1}, _}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, 3.0, "month") - ) - ) - - assert [{{2014, 4, 1}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, ^3, "month")) - ) - - assert [{{2014, 4, 1}, _}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, ^3.0, "month") - ) - ) - - assert [{{2014, 4, 1}, _}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, ^dec, "month") - ) - ) - end - - test "datetime_add with week" do - dec = Decimal.new(3) - - assert [{{2014, 1, 22}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 3, "week")) - ) - - assert [{{2014, 1, 22}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 3.0, "week")) - ) - - assert [{{2014, 1, 22}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, ^3, "week")) - ) - - assert [{{2014, 1, 22}, _}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, ^3.0, "week") - ) - ) - - assert [{{2014, 1, 22}, _}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, ^dec, "week") - ) - ) - end - - test "datetime_add with day" do - dec = Decimal.new(5) - - assert [{{2014, 1, 6}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 5, "day")) - ) - - assert [{{2014, 1, 6}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 5.0, "day")) - ) - - assert [{{2014, 1, 6}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, ^5, "day")) - ) - - assert [{{2014, 1, 6}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, ^5.0, "day")) - ) - - assert [{{2014, 1, 6}, _}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, ^dec, "day")) - ) - end - - test "datetime_add with hour" do - dec = Decimal.new(60) - - assert [{{2014, 1, 3}, {14, 0, 0, 0}}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, 60, "hour")) - ) - - assert [{{2014, 1, 3}, {14, 0, 0, 0}}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, 60.0, "hour") - ) - ) - - assert [{{2014, 1, 3}, {14, 0, 0, 0}}] = - TestRepo.all( - from(p in Post, select: datetime_add(p.inserted_at, ^60, "hour")) - ) - - assert [{{2014, 1, 3}, {14, 0, 0, 0}}] = - TestRepo.all( - from( - p in Post, - select: datetime_add(p.inserted_at, ^60.0, "hour") - ) - ) - - assert [{{2014, 1, 3}, {14, 0, 0, 
0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^dec, "hour")
-               )
-             )
-  end
-
-  test "datetime_add with minute" do
-    dec = Decimal.new(90)
-
-    assert [{{2014, 1, 1}, {3, 30, 0, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 90, "minute")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {3, 30, 0, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 90.0, "minute")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {3, 30, 0, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^90, "minute")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {3, 30, 0, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^90.0, "minute")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {3, 30, 0, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^dec, "minute")
-               )
-             )
-  end
-
-  test "datetime_add with second" do
-    dec = Decimal.new(90)
-
-    assert [{{2014, 1, 1}, {2, 1, 30, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 90, "second")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 1, 30, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 90.0, "second")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 1, 30, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^90, "second")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 1, 30, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^90.0, "second")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 1, 30, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^dec, "second")
-               )
-             )
-  end
-
-  @tag :uses_msec
-  test "datetime_add with millisecond" do
-    dec = Decimal.new(1500)
-
-    assert [{{2014, 1, 1}, {2, 0, 1, 500_000}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 1500, "millisecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 1, 500_000}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 1500.0, "millisecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 1, 500_000}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^1500, "millisecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 1, 500_000}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^1500.0, "millisecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 1, 500_000}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^dec, "millisecond")
-               )
-             )
-  end
-
-  @tag :uses_usec
-  test "datetime_add with microsecond" do
-    dec = Decimal.new(1500)
-
-    assert [{{2014, 1, 1}, {2, 0, 0, 1500}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 1500, "microsecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 0, 1500}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, 1500.0, "microsecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 0, 1500}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^1500, "microsecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 0, 1500}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^1500.0, "microsecond")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 0, 0, 1500}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^dec, "microsecond")
-               )
-             )
-  end
-
-  test "datetime_add with dynamic" do
-    inserted_at = @inserted_at
-
-    assert [{{2015, 1, 1}, _}] =
-             TestRepo.all(
-               from(p in Post, select: datetime_add(^inserted_at, ^1, ^"year"))
-             )
-
-    assert [{{2014, 4, 1}, _}] =
-             TestRepo.all(
-               from(p in Post, select: datetime_add(^inserted_at, ^3, ^"month"))
-             )
-
-    assert [{{2014, 1, 22}, _}] =
-             TestRepo.all(
-               from(p in Post, select: datetime_add(^inserted_at, ^3, ^"week"))
-             )
-
-    assert [{{2014, 1, 6}, _}] =
-             TestRepo.all(
-               from(p in Post, select: datetime_add(^inserted_at, ^5, ^"day"))
-             )
-
-    assert [{{2014, 1, 3}, {14, 0, 0, 0}}] =
-             TestRepo.all(
-               from(p in Post, select: datetime_add(^inserted_at, ^60, ^"hour"))
-             )
-
-    assert [{{2014, 1, 1}, {3, 30, 0, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(^inserted_at, ^90, ^"minute")
-               )
-             )
-
-    assert [{{2014, 1, 1}, {2, 1, 30, 0}}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(^inserted_at, ^90, ^"second")
-               )
-             )
-  end
-
-  test "datetime_add with dynamic in filters" do
-    inserted_at = @inserted_at
-    field = :inserted_at
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 where:
-                   p.inserted_at > datetime_add(^inserted_at, ^(-1), "year")
-               )
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 where: p.inserted_at > datetime_add(^inserted_at, -3, "month")
-               )
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 where:
-                   field(p, ^field) > datetime_add(^inserted_at, ^(-3), ^"week")
-               )
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 where:
-                   field(p, ^field) > datetime_add(^inserted_at, -5, ^"day")
-               )
-             )
-  end
-
-  test "datetime_add with negative interval" do
-    dec = Decimal.new(-1)
-
-    assert [{{2013, 1, 1}, _}] =
-             TestRepo.all(
-               from(p in Post, select: datetime_add(p.inserted_at, -1, "year"))
-             )
-
-    assert [{{2013, 1, 1}, _}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, -1.0, "year")
-               )
-             )
-
-    assert [{{2013, 1, 1}, _}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^(-1), "year")
-               )
-             )
-
-    assert [{{2013, 1, 1}, _}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^(-1.0), "year")
-               )
-             )
-
-    assert [{{2013, 1, 1}, _}] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 select: datetime_add(p.inserted_at, ^dec, "year")
-               )
-             )
-  end
-
-  test "from_now" do
-    current = DateTime.utc_now().year
-    dec = Decimal.new(5)
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: from_now(5, "year")))
-
-    assert y > current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: from_now(5.0, "year")))
-
-    assert y > current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: from_now(^5, "year")))
-
-    assert y > current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: from_now(^5.0, "year")))
-
-    assert y > current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: from_now(^dec, "year")))
-
-    assert y > current
-  end
-
-  test "ago" do
-    current = DateTime.utc_now().year
-    dec = Decimal.new(5)
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: ago(5, "year")))
-
-    assert y < current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: ago(5.0, "year")))
-
-    assert y < current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: ago(^5, "year")))
-
-    assert y < current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: ago(^5.0, "year")))
-
-    assert y < current
-
-    assert [{{y, _, _}, _}] =
-             TestRepo.all(from(p in Post, select: ago(^dec, "year")))
-
-    assert y < current
-  end
-end
diff --git a/integration/mssql/cases/migrator.exs b/integration/mssql/cases/migrator.exs
deleted file mode 100644
index dde9e19..0000000
--- a/integration/mssql/cases/migrator.exs
+++ /dev/null
@@ -1,156 +0,0 @@
-Code.require_file("../support/file_helpers.exs", __DIR__)
-
-defmodule Ecto.Integration.MigratorTest do
-  use Ecto.Integration.Case
-
-  import Support.FileHelpers
-  import Ecto.Migrator, only: [migrated_versions: 1]
-
-  alias Ecto.Integration.PoolRepo
-  alias Ecto.Migration.SchemaMigration
-
-  setup do
-    PoolRepo.delete_all(SchemaMigration)
-    :ok
-  end
-
-  defmodule GoodMigration do
-    use Ecto.Migration
-
-    def up do
-      :ok
-    end
-
-    def down do
-      :ok
-    end
-  end
-
-  defmodule BadMigration do
-    use Ecto.Migration
-
-    def change do
-      execute("CREATE WHAT")
-    end
-  end
-
-  import Ecto.Migrator
-
-  test "schema migration" do
-    up(PoolRepo, 20_100_906_120_000, GoodMigration, log: false)
-
-    [migration] = PoolRepo.all(SchemaMigration)
-    assert migration.version == 20_100_906_120_000
-    assert migration.inserted_at
-  end
-
-  test "migrations up and down" do
-    assert migrated_versions(PoolRepo) == []
-    assert up(PoolRepo, 20_100_906_120_000, GoodMigration, log: false) == :ok
-
-    assert migrated_versions(PoolRepo) == [20_100_906_120_000]
-
-    assert up(PoolRepo, 20_100_906_120_000, GoodMigration, log: false) ==
-             :already_up
-
-    assert migrated_versions(PoolRepo) == [20_100_906_120_000]
-
-    assert down(PoolRepo, 21_100_906_120_000, GoodMigration, log: false) ==
-             :already_down
-
-    assert migrated_versions(PoolRepo) == [20_100_906_120_000]
-    assert down(PoolRepo, 20_100_906_120_000, GoodMigration, log: false) == :ok
-    assert migrated_versions(PoolRepo) == []
-  end
-
-  test "bad migration" do
-    assert catch_error(
-             up(PoolRepo, 20_100_906_120_000, BadMigration, log: false)
-           )
-  end
-
-  test "run up to/step migration" do
-    in_tmp(fn path ->
-      create_migration(47)
-      create_migration(48)
-
-      assert [47] = run(PoolRepo, path, :up, step: 1, log: false)
-      assert count_entries() == 1
-
-      assert [48] = run(PoolRepo, path, :up, to: 48, log: false)
-    end)
-  end
-
-  test "run down to/step migration" do
-    in_tmp(fn path ->
-      migrations = [
-        create_migration(49),
-        create_migration(50)
-      ]
-
-      assert [49, 50] = run(PoolRepo, path, :up, all: true, log: false)
-      purge(migrations)
-
-      assert [50] = run(PoolRepo, path, :down, step: 1, log: false)
-      purge(migrations)
-
-      assert count_entries() == 1
-      assert [50] = run(PoolRepo, path, :up, to: 50, log: false)
-    end)
-  end
-
-  test "runs all migrations" do
-    in_tmp(fn path ->
-      migrations = [
-        create_migration(53),
-        create_migration(54)
-      ]
-
-      assert [53, 54] = run(PoolRepo, path, :up, all: true, log: false)
-      assert [] = run(PoolRepo, path, :up, all: true, log: false)
-      purge(migrations)
-
-      assert [54, 53] = run(PoolRepo, path, :down, all: true, log: false)
-      purge(migrations)
-
-      assert count_entries() == 0
-      assert [53, 54] = run(PoolRepo, path, :up, all: true, log: false)
-    end)
-  end
-
-  defp count_entries() do
-    length(Process.get(:migrations))
-  end
-
-  defp create_migration(num) do
-    module = Module.concat(__MODULE__, "Migration#{num}")
-
-    File.write!("#{num}_migration_#{num}.exs", """
-    defmodule #{module} do
-      use Ecto.Migration
-
-
-      def up do
-        update &[#{num}|&1]
-      end
-
-      def down do
-        update &List.delete(&1, #{num})
-      end
-
-      defp update(fun) do
-        Process.put(:migrations, fun.(Process.get(:migrations) || []))
-      end
-    end
-    """)
-
-    module
-  end
-
-  defp purge(modules) do
-    Enum.each(List.wrap(modules), fn m ->
-      :code.delete(m)
-      :code.purge(m)
-    end)
-  end
-end
diff --git a/integration/mssql/cases/repo.exs b/integration/mssql/cases/repo.exs
deleted file mode 100644
index ac39ae3..0000000
--- a/integration/mssql/cases/repo.exs
+++ /dev/null
@@ -1,2152 +0,0 @@
-Code.require_file("../support/types.exs", __DIR__)
-
-defmodule Ecto.Integration.RepoTest do
-  use Ecto.Integration.Case,
-    async: Application.get_env(:ecto, :async_integration_tests, true)
-
-  alias Ecto.Integration.TestRepo
-  import Ecto.Query
-
-  alias Ecto.Integration.Post
-  alias Ecto.Integration.User
-  alias Ecto.Integration.Comment
-  alias Ecto.Integration.Permalink
-  alias Ecto.Integration.Custom
-  alias Ecto.Integration.Barebone
-  alias Ecto.Integration.CompositePk
-  alias Ecto.Integration.PostUsecTimestamps
-  alias Ecto.Integration.PostUserCompositePk
-
-  test "returns already started for started repos" do
-    assert {:error, {:already_started, _}} = TestRepo.start_link()
-  end
-
-  test "fetch empty" do
-    assert TestRepo.all(Post) == []
-    assert TestRepo.all(from(p in Post)) == []
-  end
-
-  test "fetch with in" do
-    TestRepo.insert!(%Post{title: "hello"})
-
-    # Works without the query cache.
-    assert_raise Ecto.Query.CastError, fn ->
-      TestRepo.all(from(p in Post, where: p.title in ^nil))
-    end
-
-    assert [] = TestRepo.all(from(p in Post, where: p.title in []))
-    assert [] = TestRepo.all(from(p in Post, where: p.title in ["1", "2", "3"]))
-    assert [] = TestRepo.all(from(p in Post, where: p.title in ^[]))
-
-    assert [_] = TestRepo.all(from(p in Post, where: p.title not in []))
-
-    assert [_] =
-             TestRepo.all(
-               from(p in Post, where: p.title in ["1", "hello", "3"])
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(p in Post, where: p.title in ["1", ^"hello", "3"])
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(p in Post, where: p.title in ^["1", "hello", "3"])
-             )
-
-    # Still doesn't work after the query cache.
-    assert_raise Ecto.Query.CastError, fn ->
-      TestRepo.all(from(p in Post, where: p.title in ^nil))
-    end
-  end
-
-  test "fetch without schema" do
-    %Post{} = TestRepo.insert!(%Post{title: "title1"})
-    %Post{} = TestRepo.insert!(%Post{title: "title2"})
-
-    assert ["title1", "title2"] =
-             TestRepo.all(
-               from(p in "posts", order_by: p.title, select: p.title)
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(p in "posts", where: p.title == "title1", select: p.id)
-             )
-  end
-
-  @tag :invalid_prefix
-  test "fetch with invalid prefix" do
-    assert catch_error(TestRepo.all("posts", prefix: "oops"))
-  end
-
-  test "insert, update and delete" do
-    post = %Post{title: "insert, update, delete", text: "fetch empty"}
-    meta = post.__meta__
-
-    assert %Post{} = inserted = TestRepo.insert!(post)
-
-    assert %Post{} =
-             updated =
-             TestRepo.update!(Ecto.Changeset.change(inserted, text: "new"))
-
-    deleted_meta = put_in(meta.state, :deleted)
-    assert %Post{__meta__: ^deleted_meta} = TestRepo.delete!(updated)
-
-    loaded_meta = put_in(meta.state, :loaded)
-    assert %Post{__meta__: ^loaded_meta} = TestRepo.insert!(post)
-
-    post = TestRepo.one(Post)
-    assert post.__meta__.state == :loaded
-    assert post.inserted_at
-  end
-
-  test "insert, update and delete with field source" do
-    permalink = %Permalink{url: "url"}
-    assert %Permalink{url: "url"} = inserted = TestRepo.insert!(permalink)
-
-    assert %Permalink{url: "new"} =
-             updated =
-             TestRepo.update!(Ecto.Changeset.change(inserted, url: "new"))
-
-    assert %Permalink{url: "new"} = TestRepo.delete!(updated)
-  end
-
-  @tag :composite_pk
-  test "insert, update and delete with composite pk" do
-    c1 = TestRepo.insert!(%CompositePk{a: 1, b: 2, name: "first"})
-    c2 = TestRepo.insert!(%CompositePk{a: 1, b: 3, name: "second"})
-
-    assert CompositePk |> first |> TestRepo.one() == c1
-    assert CompositePk |> last |> TestRepo.one() == c2
-
-    changeset = Ecto.Changeset.cast(c1, %{name: "first change"}, ~w(name))
-    c1 = TestRepo.update!(changeset)
-    assert TestRepo.get_by!(CompositePk, %{a: 1, b: 2}) == c1
-
-    TestRepo.delete!(c2)
-    assert TestRepo.all(CompositePk) == [c1]
-
-    assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
-      TestRepo.get(CompositePk, [])
-    end
-
-    assert_raise ArgumentError, ~r"to have exactly one primary key", fn ->
-      TestRepo.get!(CompositePk, [1, 2])
-    end
-  end
-
-  @tag :composite_pk
-  test "insert, update and delete with associated composite pk" do
-    user = TestRepo.insert!(%User{})
-    post = TestRepo.insert!(%Post{title: "post title", text: "post text"})
-
-    user_post =
-      TestRepo.insert!(%PostUserCompositePk{user_id: user.id, post_id: post.id})
-
-    assert TestRepo.get_by!(
-             PostUserCompositePk,
-             user_id: user.id,
-             post_id: post.id
-           ) == user_post
-
-    TestRepo.delete!(user_post)
-    assert TestRepo.all(PostUserCompositePk) == []
-  end
-
-  @tag :invalid_prefix
-  test "insert, update and delete with invalid prefix" do
-    post = TestRepo.insert!(%Post{})
-    changeset = Ecto.Changeset.change(post, title: "foo")
-    assert catch_error(TestRepo.insert(%Post{}, prefix: "oops"))
-    assert catch_error(TestRepo.update(changeset, prefix: "oops"))
-    assert catch_error(TestRepo.delete(changeset, prefix: "oops"))
-  end
-
-  test "insert and update with changeset" do
-    # On insert we merge the fields and changes
-    changeset =
-      Ecto.Changeset.cast(
-        %Post{text: "x", title: "wrong"},
-        %{"title" => "hello", "temp" => "unknown"},
-        ~w(title temp)
-      )
-
-    post = TestRepo.insert!(changeset)
-    assert %Post{text: "x", title: "hello", temp: "unknown"} = post
-
-    assert %Post{text: "x", title: "hello", temp: "temp"} =
-             TestRepo.get!(Post, post.id)
-
-    # On update we merge only fields, direct schema changes are discarded
-    changeset =
-      Ecto.Changeset.cast(
-        %{post | text: "y"},
-        %{"title" => "world", "temp" => "unknown"},
-        ~w(title temp)
-      )
-
-    assert %Post{text: "y", title: "world", temp: "unknown"} =
-             TestRepo.update!(changeset)
-
-    assert %Post{text: "x", title: "world", temp: "temp"} =
-             TestRepo.get!(Post, post.id)
-  end
-
-  test "insert and update with empty changeset" do
-    # On insert we merge the fields and changes
-    changeset = Ecto.Changeset.cast(%Permalink{}, %{}, ~w())
-    assert %Permalink{} = permalink = TestRepo.insert!(changeset)
-
-    # Assert we can update the same value twice,
-    # without changes, without triggering stale errors.
-    changeset = Ecto.Changeset.cast(permalink, %{}, ~w())
-    assert TestRepo.update!(changeset) == permalink
-    assert TestRepo.update!(changeset) == permalink
-  end
-
-  @tag :no_primary_key
-  test "insert with no primary key" do
-    assert %Barebone{num: nil} = TestRepo.insert!(%Barebone{})
-    assert %Barebone{num: 13} = TestRepo.insert!(%Barebone{num: 13})
-  end
-
-  @tag :read_after_writes
-  test "insert and update with changeset read after writes" do
-    defmodule RAW do
-      use Ecto.Schema
-
-      schema "comments" do
-        field(:text, :string)
-        field(:lock_version, :integer, read_after_writes: true)
-      end
-    end
-
-    changeset = Ecto.Changeset.cast(struct(RAW, %{}), %{}, ~w())
-
-    # If the field is nil, we will not send it
-    # and read the value back from the database.
-    assert %{id: cid, lock_version: 1} = raw = TestRepo.insert!(changeset)
-
-    # Set the counter to 11, so we can read it soon
-    TestRepo.update_all(
-      from(u in RAW, where: u.id == ^cid),
-      set: [lock_version: 11]
-    )
-
-    # We will read back on update too
-    changeset = Ecto.Changeset.cast(raw, %{"text" => "0"}, ~w(text))
-
-    assert %{id: ^cid, lock_version: 11, text: "0"} =
-             TestRepo.update!(changeset)
-  end
-
-  test "insert autogenerates for custom type" do
-    post = TestRepo.insert!(%Post{uuid: nil})
-    assert byte_size(post.uuid) == 36
-    assert TestRepo.get_by(Post, uuid: post.uuid) == post
-  end
-
-  @tag :id_type
-  test "insert autogenerates for custom id type" do
-    defmodule ID do
-      use Ecto.Schema
-
-      @primary_key {:id, Elixir.Custom.Permalink, autogenerate: true}
-      schema "posts" do
-      end
-    end
-
-    id = TestRepo.insert!(struct(ID, id: nil))
-    assert id.id
-    assert TestRepo.get_by(ID, id: "#{id.id}-hello") == id
-  end
-
-  @tag :id_type
-  @tag :assigns_id_type
-  @tag :identity_insert
-  test "insert with user-assigned primary key" do
-    assert %Post{id: 1} = TestRepo.insert!(%Post{id: 1})
-  end
-
-  @tag :id_type
-  @tag :assigns_id_type
-  @tag :identity_insert
-  test "insert and update with user-assigned primary key in changeset" do
-    changeset = Ecto.Changeset.cast(%Post{id: 11}, %{"id" => "13"}, ~w(id))
-    assert %Post{id: 13} = post = TestRepo.insert!(changeset)
-
-    changeset = Ecto.Changeset.cast(post, %{"id" => "15"}, ~w(id))
-    assert %Post{id: 15} = TestRepo.update!(changeset)
-  end
-
-  @tag :uses_usec
-  test "insert and fetch a schema with timestamps with usec" do
-    p1 = TestRepo.insert!(%PostUsecTimestamps{title: "hello"})
-    assert [p1] == TestRepo.all(PostUsecTimestamps)
-  end
-
-  test "insert and fetch a schema with utc timestamps" do
-    datetime =
-      (System.system_time(:seconds) * 1_000_000)
-      |> DateTime.from_unix!(:microseconds)
-
-    TestRepo.insert!(%User{inserted_at: datetime})
-    assert [%{inserted_at: ^datetime}] = TestRepo.all(User)
-  end
-
-  test "optimistic locking in update/delete operations" do
-    import Ecto.Changeset, only: [cast: 3, optimistic_lock: 2]
-    base_post = TestRepo.insert!(%Comment{})
-
-    cs_ok =
-      base_post
-      |> cast(%{"text" => "foo.bar"}, ~w(text))
-      |> optimistic_lock(:lock_version)
-
-    TestRepo.update!(cs_ok)
-
-    cs_stale = optimistic_lock(base_post, :lock_version)
-    assert_raise Ecto.StaleEntryError, fn -> TestRepo.update!(cs_stale) end
-    assert_raise Ecto.StaleEntryError, fn -> TestRepo.delete!(cs_stale) end
-  end
-
-  @tag :unique_constraint
-  test "unique constraint" do
-    changeset = Ecto.Changeset.change(%Post{}, uuid: Ecto.UUID.generate())
-    {:ok, _} = TestRepo.insert(changeset)
-
-    exception =
-      assert_raise Ecto.ConstraintError,
-                   ~r/constraint error when attempting to insert struct/,
-                   fn ->
-                     changeset
-                     |> TestRepo.insert()
-                   end
-
-    assert exception.message =~ "unique: posts_uuid_index"
-    assert exception.message =~ "The changeset has not defined any constraint."
-
-    message = ~r/constraint error when attempting to insert struct/
-
-    exception =
-      assert_raise Ecto.ConstraintError, message, fn ->
-        changeset
-        |> Ecto.Changeset.unique_constraint(:uuid, name: :posts_email_changeset)
-        |> TestRepo.insert()
-      end
-
-    assert exception.message =~ "unique: posts_email_changeset"
-
-    {:error, changeset} =
-      changeset
-      |> Ecto.Changeset.unique_constraint(:uuid)
-      |> TestRepo.insert()
-
-    assert changeset.errors == [uuid: {"has already been taken", []}]
-    assert changeset.data.__meta__.state == :built
-  end
-
-  @tag :unique_constraint
-  test "unique constraint from association" do
-    uuid = Ecto.UUID.generate()
-
-    post =
-      &(%Post{} |> Ecto.Changeset.change(uuid: &1)
-        |> Ecto.Changeset.unique_constraint(:uuid))
-
-    {:error, changeset} =
-      TestRepo.insert(%User{
-        comments: [%Comment{}],
-        permalink: %Permalink{},
-        posts: [post.(uuid), post.(uuid), post.(Ecto.UUID.generate())]
-      })
-
-    [_, p2, _] = changeset.changes.posts
-    assert p2.errors == [uuid: {"has already been taken", []}]
-  end
-
-  @tag :id_type
-  @tag :unique_constraint
-  test "unique constraint with binary_id" do
-    changeset = Ecto.Changeset.change(%Custom{}, uuid: Ecto.UUID.generate())
-    {:ok, _} = TestRepo.insert(changeset)
-
-    {:error, changeset} =
-      changeset
-      |> Ecto.Changeset.unique_constraint(:uuid)
-      |> TestRepo.insert()
-
-    assert changeset.errors == [uuid: {"has already been taken", []}]
-    assert changeset.data.__meta__.state == :built
-  end
-
-  test "unique pseudo-constraint violation error message with join table at the repository" do
-    post =
-      TestRepo.insert!(%Post{title: "some post"})
-      |> TestRepo.preload(:unique_users)
-
-    user = TestRepo.insert!(%User{name: "some user"})
-
-    # Violate the unique composite index
-    {:error, changeset} =
-      post
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_assoc(:unique_users, [user, user])
-      |> TestRepo.update()
-
-    errors =
-      Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
-
-    assert errors == %{unique_users: [%{}, %{id: ["has already been taken"]}]}
-    refute changeset.valid?
-  end
-
-  @tag :join
-  @tag :unique_constraint
-  test "unique constraint violation error message with join table in single changeset" do
-    post =
-      TestRepo.insert!(%Post{title: "some post"})
-      |> TestRepo.preload(:constraint_users)
-
-    user = TestRepo.insert!(%User{name: "some user"})
-
-    # Violate the unique composite index
-    {:error, changeset} =
-      post
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_assoc(:constraint_users, [user, user])
-      |> Ecto.Changeset.unique_constraint(
-        :user,
-        name: :posts_users_composite_pk_post_id_user_id_index,
-        message: "has already been assigned"
-      )
-      |> TestRepo.update()
-
-    errors =
-      Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
-
-    assert errors == %{
-             constraint_users: [%{}, %{user: ["has already been assigned"]}]
-           }
-
-    refute changeset.valid?
-  end
-
-  @tag :join
-  @tag :unique_constraint
-  test "unique constraint violation error message with join table and separate changesets" do
-    post =
-      TestRepo.insert!(%Post{title: "some post"})
-      |> TestRepo.preload(:constraint_users)
-
-    user = TestRepo.insert!(%User{name: "some user"})
-
-    post
-    |> Ecto.Changeset.change()
-    |> Ecto.Changeset.put_assoc(:constraint_users, [user])
-    |> TestRepo.update()
-
-    # Violate the unique composite index
-    {:error, changeset} =
-      post
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_assoc(:constraint_users, [user])
-      |> Ecto.Changeset.unique_constraint(
-        :user,
-        name: :posts_users_composite_pk_post_id_user_id_index,
-        message: "has already been assigned"
-      )
-      |> TestRepo.update()
-
-    errors =
-      Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
-
-    assert errors == %{
-             constraint_users: [%{user: ["has already been assigned"]}]
-           }
-
-    refute changeset.valid?
-  end
-
-  @tag :foreign_key_constraint
-  test "foreign key constraint" do
-    changeset = Ecto.Changeset.change(%Comment{post_id: 0})
-
-    exception =
-      assert_raise Ecto.ConstraintError,
-                   ~r/constraint error when attempting to insert struct/,
-                   fn ->
-                     changeset
-                     |> TestRepo.insert()
-                   end
-
-    assert exception.message =~ "foreign_key: comments_post_id_fkey"
-    assert exception.message =~ "The changeset has not defined any constraint."
-
-    message = ~r/constraint error when attempting to insert struct/
-
-    exception =
-      assert_raise Ecto.ConstraintError, message, fn ->
-        changeset
-        |> Ecto.Changeset.foreign_key_constraint(
-          :post_id,
-          name: :comments_post_id_other
-        )
-        |> TestRepo.insert()
-      end
-
-    assert exception.message =~ "foreign_key: comments_post_id_other"
-
-    {:error, changeset} =
-      changeset
-      |> Ecto.Changeset.foreign_key_constraint(:post_id)
-      |> TestRepo.insert()
-
-    assert changeset.errors == [post_id: {"does not exist", []}]
-  end
-
-  @tag :foreign_key_constraint
-  test "assoc constraint" do
-    changeset = Ecto.Changeset.change(%Comment{post_id: 0})
-
-    exception =
-      assert_raise Ecto.ConstraintError,
-                   ~r/constraint error when attempting to insert struct/,
-                   fn ->
-                     changeset
-                     |> TestRepo.insert()
-                   end
-
-    assert exception.message =~ "foreign_key: comments_post_id_fkey"
-    assert exception.message =~ "The changeset has not defined any constraint."
- - message = ~r/constraint error when attempting to insert struct/ - - exception = - assert_raise Ecto.ConstraintError, message, fn -> - changeset - |> Ecto.Changeset.assoc_constraint(:post, name: :comments_post_id_other) - |> TestRepo.insert() - end - - assert exception.message =~ "foreign_key: comments_post_id_other" - - {:error, changeset} = - changeset - |> Ecto.Changeset.assoc_constraint(:post) - |> TestRepo.insert() - - assert changeset.errors == [post: {"does not exist", []}] - end - - @tag :foreign_key_constraint - test "no assoc constraint error" do - user = TestRepo.insert!(%User{}) - TestRepo.insert!(%Permalink{user_id: user.id}) - - exception = - assert_raise Ecto.ConstraintError, - ~r/constraint error when attempting to delete struct/, - fn -> - TestRepo.delete!(user) - end - - assert exception.message =~ "foreign_key: permalinks_user_id_fkey" - assert exception.message =~ "The changeset has not defined any constraint." - end - - @tag :foreign_key_constraint - test "no assoc constraint with changeset mismatch" do - user = TestRepo.insert!(%User{}) - TestRepo.insert!(%Permalink{user_id: user.id}) - - message = ~r/constraint error when attempting to delete struct/ - - exception = - assert_raise Ecto.ConstraintError, message, fn -> - user - |> Ecto.Changeset.change() - |> Ecto.Changeset.no_assoc_constraint( - :permalink, - name: :permalinks_user_id_pther - ) - |> TestRepo.delete() - end - - assert exception.message =~ "foreign_key: permalinks_user_id_pther" - end - - @tag :foreign_key_constraint - test "no assoc constraint with changeset match" do - user = TestRepo.insert!(%User{}) - TestRepo.insert!(%Permalink{user_id: user.id}) - - {:error, changeset} = - user - |> Ecto.Changeset.change() - |> Ecto.Changeset.no_assoc_constraint(:permalink) - |> TestRepo.delete() - - assert changeset.errors == [ - permalink: {"is still associated with this entry", []} - ] - end - - test "insert and update with failing child foreign key" do - defmodule Order do - use Ecto.Integration.Schema - import Ecto.Changeset - - schema "orders" do - embeds_one(:item, Ecto.Integration.Item) - belongs_to(:comment, Ecto.Integration.Comment) - end - - def changeset(order, params) do - order - |> cast(params, [:comment_id]) - |> cast_embed(:item, with: &item_changeset/2) - |> cast_assoc(:comment, with: &comment_changeset/2) - end - - def item_changeset(item, params) do - item - |> cast(params, [:price]) - end - - def comment_changeset(comment, params) do - comment - |> cast(params, [:post_id, :text]) - |> cast_assoc(:post) - |> assoc_constraint(:post) - end - end - - changeset = - Order.changeset(struct(Order, %{}), %{ - item: %{price: 10}, - comment: %{text: "1", post_id: 0} - }) - - assert %Ecto.Changeset{} = changeset.changes.item - - {:error, changeset} = TestRepo.insert(changeset) - assert %Ecto.Changeset{} = changeset.changes.item - - order = - TestRepo.insert!(Order.changeset(struct(Order, %{}), %{})) - |> TestRepo.preload([:comment]) - - changeset = - Order.changeset(order, %{ - item: %{price: 10}, - comment: %{text: "1", post_id: 0} - }) - - assert %Ecto.Changeset{} = changeset.changes.item - - {:error, changeset} = TestRepo.update(changeset) - assert %Ecto.Changeset{} = changeset.changes.item - end - - test "unsafe_validate_unique/3" do - {:ok, inserted_post} = - TestRepo.insert(%Post{title: "Greetings", text: "hi"}) - - new_post_changeset = - Post.changeset(%Post{}, %{title: "Greetings", text: "ho"}) - - changeset = - Ecto.Changeset.unsafe_validate_unique( - new_post_changeset, - [:title], - TestRepo 
- ) - - assert changeset.errors[:title] == - {"has already been taken", - validation: :unsafe_unique, fields: [:title]} - - changeset = - Ecto.Changeset.unsafe_validate_unique( - new_post_changeset, - [:title, :text], - TestRepo - ) - - assert changeset.errors[:title] == nil - - update_changeset = Post.changeset(inserted_post, %{text: "ho"}) - - changeset = - Ecto.Changeset.unsafe_validate_unique( - update_changeset, - [:title], - TestRepo - ) - - # cannot conflict with itself - assert changeset.errors[:title] == nil - end - - test "get(!)" do - post1 = TestRepo.insert!(%Post{title: "1", text: "hai"}) - post2 = TestRepo.insert!(%Post{title: "2", text: "hai"}) - - assert post1 == TestRepo.get(Post, post1.id) - # With casting - assert post2 == TestRepo.get(Post, to_string(post2.id)) - - assert post1 == TestRepo.get!(Post, post1.id) - # With casting - assert post2 == TestRepo.get!(Post, to_string(post2.id)) - - TestRepo.delete!(post1) - - assert nil == TestRepo.get(Post, post1.id) - - assert_raise Ecto.NoResultsError, fn -> - TestRepo.get!(Post, post1.id) - end - end - - test "get(!) with custom source" do - custom = Ecto.put_meta(%Custom{}, source: "posts") - custom = TestRepo.insert!(custom) - bid = custom.bid - - assert %Custom{bid: ^bid, __meta__: %{source: {nil, "posts"}}} = - TestRepo.get(from(c in {"posts", Custom}), bid) - end - - test "get(!) with binary_id" do - custom = TestRepo.insert!(%Custom{}) - bid = custom.bid - assert %Custom{bid: ^bid} = TestRepo.get(Custom, bid) - end - - test "get_by(!)" do - post1 = TestRepo.insert!(%Post{title: "1", text: "hai"}) - post2 = TestRepo.insert!(%Post{title: "2", text: "hello"}) - - assert post1 == TestRepo.get_by(Post, id: post1.id) - assert post1 == TestRepo.get_by(Post, text: post1.text) - assert post1 == TestRepo.get_by(Post, id: post1.id, text: post1.text) - # With casting - assert post2 == TestRepo.get_by(Post, id: to_string(post2.id)) - assert nil == TestRepo.get_by(Post, text: "hey") - assert nil == TestRepo.get_by(Post, id: post2.id, text: "hey") - - assert post1 == TestRepo.get_by!(Post, id: post1.id) - assert post1 == TestRepo.get_by!(Post, text: post1.text) - assert post1 == TestRepo.get_by!(Post, id: post1.id, text: post1.text) - # With casting - assert post2 == TestRepo.get_by!(Post, id: to_string(post2.id)) - - assert post1 == TestRepo.get_by!(Post, %{id: post1.id}) - - assert_raise Ecto.NoResultsError, fn -> - TestRepo.get_by!(Post, id: post2.id, text: "hey") - end - end - - test "first, last and one(!)" do - post1 = TestRepo.insert!(%Post{title: "1", text: "hai"}) - post2 = TestRepo.insert!(%Post{title: "2", text: "hai"}) - - assert post1 == Post |> first |> TestRepo.one() - assert post2 == Post |> last |> TestRepo.one() - - query = from(p in Post, order_by: p.title) - assert post1 == query |> first |> TestRepo.one() - assert post2 == query |> last |> TestRepo.one() - - query = from(p in Post, order_by: [desc: p.title], limit: 10) - assert post2 == query |> first |> TestRepo.one() - assert post1 == query |> last |> TestRepo.one() - - query = from(p in Post, where: is_nil(p.id)) - refute query |> first |> TestRepo.one() - refute query |> first |> TestRepo.one() - - assert_raise Ecto.NoResultsError, fn -> - query |> first |> TestRepo.one!() - end - - assert_raise Ecto.NoResultsError, fn -> query |> last |> TestRepo.one!() end - end - - test "aggregate" do - assert TestRepo.aggregate(Post, :max, :visits) == nil - - TestRepo.insert!(%Post{visits: 10}) - TestRepo.insert!(%Post{visits: 12}) - TestRepo.insert!(%Post{visits: 14}) - 
TestRepo.insert!(%Post{visits: 14}) - - # Barebones - assert TestRepo.aggregate(Post, :max, :visits) == 14 - assert TestRepo.aggregate(Post, :min, :visits) == 10 - assert TestRepo.aggregate(Post, :count, :visits) == 4 - assert "50" = to_string(TestRepo.aggregate(Post, :sum, :visits)) - assert "12" <> _ = to_string(TestRepo.aggregate(Post, :avg, :visits)) - - # With order_by - query = from(Post, order_by: [asc: :visits]) - assert TestRepo.aggregate(query, :max, :visits) == 14 - - # With order_by and limit - query = from(Post, order_by: [asc: :visits], limit: 2) - assert TestRepo.aggregate(query, :max, :visits) == 12 - - # With distinct - query = from(Post, distinct: true) - assert TestRepo.aggregate(query, :count, :visits) == 3 - end - - test "insert all" do - assert {2, nil} = - TestRepo.insert_all("comments", [ - [text: "1"], - %{text: "2", lock_version: 2} - ]) - - assert {2, nil} = - TestRepo.insert_all({"comments", Comment}, [ - [text: "3"], - %{text: "4", lock_version: 2} - ]) - - assert [ - %Comment{text: "1", lock_version: 1}, - %Comment{text: "2", lock_version: 2}, - %Comment{text: "3", lock_version: 1}, - %Comment{text: "4", lock_version: 2} - ] = TestRepo.all(Comment) - - assert {2, nil} = - TestRepo.insert_all(Post, [ - [uuid: Ecto.UUID.generate()], - [uuid: Ecto.UUID.generate()] - ]) - - assert [%Post{}, %Post{}] = TestRepo.all(Post) - - assert {0, nil} = TestRepo.insert_all("posts", []) - assert {0, nil} = TestRepo.insert_all({"posts", Post}, []) - end - - @tag :invalid_prefix - test "insert all with invalid prefix" do - assert catch_error(TestRepo.insert_all(Post, [[], []], prefix: "oops")) - end - - @tag :returning - test "insert all with returning with schema" do - assert {0, []} = TestRepo.insert_all(Comment, [], returning: true) - assert {0, nil} = TestRepo.insert_all(Comment, [], returning: false) - - {2, [c1, c2]} = - TestRepo.insert_all( - Comment, - [[text: "1"], [text: "2"]], - returning: [:id, :text] - ) - - assert %Comment{text: "1", __meta__: %{state: :loaded}} = c1 - assert %Comment{text: "2", __meta__: %{state: :loaded}} = c2 - - {2, [c1, c2]} = - TestRepo.insert_all(Comment, [[text: "3"], [text: "4"]], returning: true) - - assert %Comment{text: "3", __meta__: %{state: :loaded}} = c1 - assert %Comment{text: "4", __meta__: %{state: :loaded}} = c2 - end - - @tag :returning - test "insert all with returning with schema with field source" do - assert {0, []} = TestRepo.insert_all(Permalink, [], returning: true) - assert {0, nil} = TestRepo.insert_all(Permalink, [], returning: false) - - {2, [c1, c2]} = - TestRepo.insert_all( - Permalink, - [[url: "1"], [url: "2"]], - returning: [:id, :url] - ) - - assert %Permalink{url: "1", __meta__: %{state: :loaded}} = c1 - assert %Permalink{url: "2", __meta__: %{state: :loaded}} = c2 - - {2, [c1, c2]} = - TestRepo.insert_all(Permalink, [[url: "3"], [url: "4"]], returning: true) - - assert %Permalink{url: "3", __meta__: %{state: :loaded}} = c1 - assert %Permalink{url: "4", __meta__: %{state: :loaded}} = c2 - end - - @tag :returning - test "insert all with returning without schema" do - {2, [c1, c2]} = - TestRepo.insert_all( - "comments", - [[text: "1"], [text: "2"]], - returning: [:id, :text] - ) - - assert %{id: _, text: "1"} = c1 - assert %{id: _, text: "2"} = c2 - - assert_raise ArgumentError, fn -> - TestRepo.insert_all( - "comments", - [[text: "1"], [text: "2"]], - returning: true - ) - end - end - - test "insert all with dumping" do - datetime = ~N[2014-01-16 20:26:51.000000] - - assert {2, nil} = - 
TestRepo.insert_all(Post, [ - %{inserted_at: datetime, uuid: Ecto.UUID.generate()}, - %{title: "date", uuid: Ecto.UUID.generate()} - ]) - - assert [ - %Post{inserted_at: ^datetime, title: nil}, - %Post{inserted_at: nil, title: "date"} - ] = TestRepo.all(Post) - end - - test "insert all autogenerates for binary_id type" do - custom = TestRepo.insert!(%Custom{bid: nil}) - assert custom.bid - assert TestRepo.get(Custom, custom.bid) - assert TestRepo.delete!(custom) - refute TestRepo.get(Custom, custom.bid) - - uuid = Ecto.UUID.generate() - - assert {2, nil} = - TestRepo.insert_all(Custom, [%{uuid: uuid}, %{bid: custom.bid}]) - - assert [%Custom{bid: bid2, uuid: nil}, %Custom{bid: bid1, uuid: ^uuid}] = - Enum.sort_by(TestRepo.all(Custom), & &1.uuid) - - assert bid1 && bid2 - assert custom.bid != bid1 - assert custom.bid == bid2 - end - - test "update all" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) - - assert {3, nil} = TestRepo.update_all(Post, set: [title: "x"]) - - assert %Post{title: "x"} = TestRepo.get(Post, id1) - assert %Post{title: "x"} = TestRepo.get(Post, id2) - assert %Post{title: "x"} = TestRepo.get(Post, id3) - - assert {3, nil} = - TestRepo.update_all("posts", [set: [title: nil]], returning: false) - - assert %Post{title: nil} = TestRepo.get(Post, id1) - assert %Post{title: nil} = TestRepo.get(Post, id2) - assert %Post{title: nil} = TestRepo.get(Post, id3) - end - - @tag :invalid_prefix - test "update all with invalid prefix" do - assert catch_error( - TestRepo.update_all(Post, [set: [title: "x"]], prefix: "oops") - ) - end - - @tag :returning - test "update all with returning with schema" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) - - assert {3, posts} = - TestRepo.update_all(Post, [set: [title: "x"]], returning: true) - - [p1, p2, p3] = Enum.sort_by(posts, & &1.id) - assert %Post{id: ^id1, title: "x"} = p1 - assert %Post{id: ^id2, title: "x"} = p2 - assert %Post{id: ^id3, title: "x"} = p3 - - assert {3, posts} = - TestRepo.update_all( - Post, - [set: [visits: 11]], - returning: [:id, :visits] - ) - - [p1, p2, p3] = Enum.sort_by(posts, & &1.id) - assert %Post{id: ^id1, title: nil, visits: 11} = p1 - assert %Post{id: ^id2, title: nil, visits: 11} = p2 - assert %Post{id: ^id3, title: nil, visits: 11} = p3 - end - - @tag :returning - test "update all with returning without schema" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) - - assert {3, posts} = - TestRepo.update_all( - "posts", - [set: [title: "x"]], - returning: [:id, :title] - ) - - [p1, p2, p3] = Enum.sort_by(posts, & &1.id) - assert p1 == %{id: "#{id1}", title: "x"} - assert p2 == %{id: "#{id2}", title: "x"} - assert p3 == %{id: "#{id3}", title: "x"} - end - - test "update all with filter" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) - - query = - from( - p in Post, - where: p.title == "1" or p.title == "2", - update: [set: [text: ^"y"]] - ) - - assert {2, nil} = TestRepo.update_all(query, set: [title: "x"]) - - assert %Post{title: "x", text: "y"} = 
TestRepo.get(Post, id1) - assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id2) - assert %Post{title: "3", text: nil} = TestRepo.get(Post, id3) - end - - test "update all no entries" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) - - query = from(p in Post, where: p.title == "4") - assert {0, []} = TestRepo.update_all(query, set: [title: "x"]) - - assert %Post{title: "1"} = TestRepo.get(Post, id1) - assert %Post{title: "2"} = TestRepo.get(Post, id2) - assert %Post{title: "3"} = TestRepo.get(Post, id3) - end - - test "update all increment syntax" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", visits: 0}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", visits: 1}) - - # Positive - query = from(p in Post, where: not is_nil(p.id), update: [inc: [visits: 2]]) - assert {2, nil} = TestRepo.update_all(query, []) - - assert %Post{visits: 2} = TestRepo.get(Post, id1) - assert %Post{visits: 3} = TestRepo.get(Post, id2) - - # Negative - query = - from(p in Post, where: not is_nil(p.id), update: [inc: [visits: -1]]) - - assert {2, nil} = TestRepo.update_all(query, []) - - assert %Post{visits: 1} = TestRepo.get(Post, id1) - assert %Post{visits: 2} = TestRepo.get(Post, id2) - end - - @tag :id_type - test "update all with casting and dumping on id type field" do - assert %Post{id: id1} = TestRepo.insert!(%Post{}) - assert {1, nil} = TestRepo.update_all(Post, set: [counter: to_string(id1)]) - assert %Post{counter: ^id1} = TestRepo.get(Post, id1) - end - - test "update all with casting and dumping" do - text = "hai" - datetime = ~N[2014-01-16 20:26:51.000000] - assert %Post{id: id} = TestRepo.insert!(%Post{}) - - assert {1, nil} = - TestRepo.update_all(Post, set: [text: text, inserted_at: datetime]) - - assert %Post{text: "hai", inserted_at: ^datetime} = TestRepo.get(Post, id) - end - - test "delete all" do - assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"}) - assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"}) - - assert {3, nil} = TestRepo.delete_all(Post, returning: false) - assert [] = TestRepo.all(Post) - end - - @tag :invalid_prefix - test "delete all with invalid prefix" do - assert catch_error(TestRepo.delete_all(Post, prefix: "oops")) - end - - @tag :returning - test "delete all with returning with schema" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"}) - - assert {3, posts} = TestRepo.delete_all(Post, returning: true) - - [p1, p2, p3] = Enum.sort_by(posts, & &1.id) - assert %Post{id: ^id1, title: "1"} = p1 - assert %Post{id: ^id2, title: "2"} = p2 - assert %Post{id: ^id3, title: "3"} = p3 - end - - @tag :returning - test "delete all with returning without schema" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"}) - - assert {3, posts} = TestRepo.delete_all("posts", returning: [:id, :title]) - - [p1, p2, p3] = Enum.sort_by(posts, & &1.id) - assert p1 == %{id: "#{id1}", title: "1"} - assert p2 == %{id: "#{id2}", title: "2"} - assert p3 == %{id: "#{id3}", title: "3"} - end - - 
test "delete all with filter" do - assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"}) - assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"}) - - query = from(p in Post, where: p.title == "1" or p.title == "2") - assert {2, nil} = TestRepo.delete_all(query) - assert [%Post{}] = TestRepo.all(Post) - end - - test "delete all no entries" do - assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"}) - assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"}) - - query = from(p in Post, where: p.title == "4") - assert {0, []} = TestRepo.delete_all(query) - assert %Post{title: "1"} = TestRepo.get(Post, id1) - assert %Post{title: "2"} = TestRepo.get(Post, id2) - assert %Post{title: "3"} = TestRepo.get(Post, id3) - end - - test "virtual field" do - assert %Post{id: id} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - assert TestRepo.get(Post, id).temp == "temp" - end - - ## Query syntax - - defmodule Foo do - defstruct [:title] - end - - describe "query select" do - test "expressions" do - %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - - assert [{"1", "hai"}] == - TestRepo.all(from(p in Post, select: {p.title, p.text})) - - assert [["1", "hai"]] == - TestRepo.all(from(p in Post, select: [p.title, p.text])) - - assert [%{:title => "1", 3 => "hai", "text" => "hai"}] == - TestRepo.all( - from( - p in Post, - select: %{ - :title => p.title, - "text" => p.text, - 3 => p.text - } - ) - ) - - assert [%{:title => "1", "1" => "hai", "text" => "hai"}] == - TestRepo.all( - from( - p in Post, - select: %{ - :title => p.title, - p.title => p.text, - "text" => p.text - } - ) - ) - - assert [%Foo{title: "1"}] == - TestRepo.all(from(p in Post, select: %Foo{title: p.title})) - end - - test "map update" do - %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) - - assert [%Post{:title => "new title", text: "hai"}] = - TestRepo.all(from(p in Post, select: %{p | title: "new title"})) - - assert [%Post{title: "new title", text: "hai"}] = - TestRepo.all( - from(p in Post, select: %Post{p | title: "new title"}) - ) - - assert_raise KeyError, fn -> - TestRepo.all(from(p in Post, select: %{p | unknown: "new title"})) - end - - assert_raise BadMapError, fn -> - TestRepo.all(from(p in Post, select: %{p.title | title: "new title"})) - end - - assert_raise BadStructError, fn -> - TestRepo.all(from(p in Post, select: %Foo{p | title: p.title})) - end - end - - test "take with structs" do - %{id: pid1} = TestRepo.insert!(%Post{title: "1"}) - %{id: pid2} = TestRepo.insert!(%Post{title: "2"}) - %{id: pid3} = TestRepo.insert!(%Post{title: "3"}) - - [p1, p2, p3] = - Post |> select([p], struct(p, [:title])) |> order_by([:title]) - |> TestRepo.all() - - refute p1.id - assert p1.title == "1" - assert match?(%Post{}, p1) - refute p2.id - assert p2.title == "2" - assert match?(%Post{}, p2) - refute p3.id - assert p3.title == "3" - assert match?(%Post{}, p3) - - [p1, p2, p3] = Post |> select([:id]) |> order_by([:id]) |> TestRepo.all() - assert %Post{id: ^pid1} = p1 - assert %Post{id: ^pid2} = p2 - assert %Post{id: ^pid3} = p3 - end - - test "take with maps" do - %{id: pid1} = TestRepo.insert!(%Post{title: "1"}) - %{id: pid2} = TestRepo.insert!(%Post{title: "2"}) - %{id: pid3} = TestRepo.insert!(%Post{title: "3"}) - - [p1, p2, p3] = - "posts" |> select([p], map(p, [:title])) |> order_by([:title]) - |> TestRepo.all() - 
- assert p1 == %{title: "1"} - assert p2 == %{title: "2"} - assert p3 == %{title: "3"} - - [p1, p2, p3] = - "posts" |> select([:id]) |> order_by([:id]) |> TestRepo.all() - - assert p1 == %{id: "#{pid1}"} - assert p2 == %{id: "#{pid2}"} - assert p3 == %{id: "#{pid3}"} - end - - test "take with preload assocs" do - %{id: pid} = TestRepo.insert!(%Post{title: "post"}) - TestRepo.insert!(%Comment{post_id: pid, text: "comment"}) - fields = [:id, :title, comments: [:text, :post_id]] - - [p] = Post |> preload(:comments) |> select([p], ^fields) |> TestRepo.all() - assert %Post{title: "post"} = p - assert [%Comment{text: "comment"}] = p.comments - - [p] = - Post |> preload(:comments) |> select([p], struct(p, ^fields)) - |> TestRepo.all() - - assert %Post{title: "post"} = p - assert [%Comment{text: "comment"}] = p.comments - - [p] = - Post |> preload(:comments) |> select([p], map(p, ^fields)) - |> TestRepo.all() - - assert p == %{ - id: pid, - title: "post", - comments: [%{text: "comment", post_id: pid}] - } - end - - test "take with nil preload assoc" do - %{id: cid} = TestRepo.insert!(%Comment{text: "comment"}) - fields = [:id, :text, post: [:title]] - - [c] = Comment |> preload(:post) |> select([c], ^fields) |> TestRepo.all() - assert %Comment{id: ^cid, text: "comment", post: nil} = c - - [c] = - Comment |> preload(:post) |> select([c], struct(c, ^fields)) - |> TestRepo.all() - - assert %Comment{id: ^cid, text: "comment", post: nil} = c - - [c] = - Comment |> preload(:post) |> select([c], map(c, ^fields)) - |> TestRepo.all() - - assert c == %{id: cid, text: "comment", post: nil} - end - - test "take with join assocs" do - %{id: pid} = TestRepo.insert!(%Post{title: "post"}) - %{id: cid} = TestRepo.insert!(%Comment{post_id: pid, text: "comment"}) - fields = [:id, :title, comments: [:text, :post_id, :id]] - - query = - from( - p in Post, - where: p.id == ^pid, - join: c in assoc(p, :comments), - preload: [comments: c] - ) - - p = TestRepo.one(from(q in query, select: ^fields)) - assert %Post{title: "post"} = p - assert [%Comment{text: "comment"}] = p.comments - - p = TestRepo.one(from(q in query, select: struct(q, ^fields))) - assert %Post{title: "post"} = p - assert [%Comment{text: "comment"}] = p.comments - - p = TestRepo.one(from(q in query, select: map(q, ^fields))) - - assert p == %{ - id: pid, - title: "post", - comments: [%{text: "comment", post_id: pid, id: cid}] - } - end - - test "take with single nil column" do - %Post{} = TestRepo.insert!(%Post{title: "1", counter: nil}) - - assert %{counter: nil} = - TestRepo.one( - from(p in Post, where: p.title == "1", select: [:counter]) - ) - end - - test "field source" do - TestRepo.insert!(%Permalink{url: "url"}) - assert ["url"] = Permalink |> select([p], p.url) |> TestRepo.all() - assert [1] = Permalink |> select([p], count(p.url)) |> TestRepo.all() - end - - test "merge" do - %Post{} = TestRepo.insert!(%Post{title: "1", counter: nil}) - - # Merge on source - assert [%Post{title: "2"}] = - Post |> select([p], merge(p, %{title: "2"})) |> TestRepo.all() - - assert [%Post{title: "2"}] = - Post |> select([p], p) |> select_merge([p], %{title: "2"}) - |> TestRepo.all() - - # Merge on struct - assert [%Post{title: "2"}] = - Post - |> select([p], merge(%Post{title: p.title}, %Post{title: "2"})) - |> TestRepo.all() - - assert [%Post{title: "2"}] = - Post - |> select([p], %Post{title: p.title}) - |> select_merge([p], %Post{title: "2"}) - |> TestRepo.all() - - assert [%Post{title: "2"}] = - Post |> select([p], merge(%Post{title: p.title}, %{title: "2"})) - 
|> TestRepo.all() - - assert [%Post{title: "2"}] = - Post - |> select([p], %Post{title: p.title}) - |> select_merge([p], %{title: "2"}) - |> TestRepo.all() - - # Merge on map - assert [%{title: "2"}] = - Post |> select([p], merge(%{title: p.title}, %{title: "2"})) - |> TestRepo.all() - - assert [%{title: "2"}] = - Post - |> select([p], %{title: p.title}) - |> select_merge([p], %{title: "2"}) - |> TestRepo.all() - - # Merge errors - assert_raise ArgumentError, - ~r/can only merge with a struct on the right side when both sides represent the same struct/, - fn -> - Post - |> select([p], merge(%{title: p.title}, %Post{title: "2"})) - |> TestRepo.all() - end - - assert_raise ArgumentError, - ~r/cannot merge because the left side is not a map/, - fn -> - Post |> select([p], merge(p.title, %{title: "2"})) - |> TestRepo.all() - end - - assert_raise ArgumentError, - ~r/cannot merge because the right side is not a map/, - fn -> - Post |> select([p], merge(%{title: "2"}, p.title)) - |> TestRepo.all() - end - end - end - - test "query count distinct" do - TestRepo.insert!(%Post{title: "1"}) - TestRepo.insert!(%Post{title: "1"}) - TestRepo.insert!(%Post{title: "2"}) - - assert [3] == Post |> select([p], count(p.title)) |> TestRepo.all() - - assert [2] == - Post |> select([p], count(p.title, :distinct)) |> TestRepo.all() - end - - test "query where interpolation" do - post1 = TestRepo.insert!(%Post{text: "x", title: "hello"}) - post2 = TestRepo.insert!(%Post{text: "y", title: "goodbye"}) - - assert [post1, post2] == - Post |> where([], []) |> TestRepo.all() |> Enum.sort_by(& &1.id) - - assert [post1] == Post |> where([], title: "hello") |> TestRepo.all() - - assert [post1] == - Post |> where([], title: "hello", id: ^post1.id) |> TestRepo.all() - - params0 = [] - params1 = [title: "hello"] - params2 = [title: "hello", id: post1.id] - - assert [post1, post2] == - from(Post, where: ^params0) |> TestRepo.all() - |> Enum.sort_by(& &1.id) - - assert [post1] == from(Post, where: ^params1) |> TestRepo.all() - assert [post1] == from(Post, where: ^params2) |> TestRepo.all() - - post3 = TestRepo.insert!(%Post{text: "y", title: "goodbye", uuid: nil}) - params3 = [title: "goodbye", uuid: post3.uuid] - assert [post3] == from(Post, where: ^params3) |> TestRepo.all() - end - - ## Logging - - test "log entry logged on query" do - log = fn entry -> - assert %Ecto.LogEntry{result: {:ok, _}} = entry - assert is_integer(entry.query_time) and entry.query_time >= 0 - assert is_integer(entry.decode_time) and entry.query_time >= 0 - assert is_integer(entry.queue_time) and entry.queue_time >= 0 - send(self(), :logged) - end - - Process.put(:on_log, log) - - _ = TestRepo.all(Post) - assert_received :logged - end - - test "log entry not logged when log is false" do - Process.put(:on_log, fn _ -> flunk("logged") end) - TestRepo.insert!(%Post{title: "1"}, log: false) - end - - describe "upsert via insert" do - @describetag :upsert - - test "on conflict raise" do - {:ok, inserted} = - TestRepo.insert(%Post{title: "first"}, on_conflict: :raise) - - assert catch_error( - TestRepo.insert( - %Post{id: inserted.id, title: "second"}, - on_conflict: :raise - ) - ) - end - - test "on conflict ignore" do - post = %Post{title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"} - {:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing) - assert inserted.id - assert inserted.__meta__.state == :loaded - - {:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing) - assert not_inserted.id == nil - assert 
not_inserted.__meta__.state == :loaded - end - - @tag :with_conflict_target - test "on conflict and associations" do - on_conflict = [set: [title: "second"]] - - post = %Post{ - uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e", - title: "first", - comments: [%Comment{}] - } - - {:ok, inserted} = - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: [:uuid] - ) - - assert inserted.id - end - - @tag :with_conflict_target - test "on conflict with inc" do - uuid = "6fa459ea-ee8a-3ca4-894e-db77e160355e" - post = %Post{title: "first", uuid: uuid} - {:ok, _} = TestRepo.insert(post) - post = %{title: "upsert", uuid: uuid} - - TestRepo.insert_all( - Post, - [post], - on_conflict: [inc: [visits: 1]], - conflict_target: :uuid - ) - end - - @tag :with_conflict_target - test "on conflict ignore and conflict target" do - post = %Post{title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"} - - {:ok, inserted} = - TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid]) - - assert inserted.id - - # Error on non-conflict target - assert catch_error( - TestRepo.insert( - post, - on_conflict: :nothing, - conflict_target: [:id] - ) - ) - - # Error on conflict target - {:ok, not_inserted} = - TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid]) - - assert not_inserted.id == nil - end - - @tag :without_conflict_target - test "on conflict keyword list" do - on_conflict = [set: [title: "second"]] - post = %Post{title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"} - {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict) - assert inserted.id - - {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict) - assert updated.id == inserted.id - assert updated.title != "second" - assert TestRepo.get!(Post, inserted.id).title == "second" - end - - @tag :with_conflict_target - test "on conflict keyword list and conflict target" do - on_conflict = [set: [title: "second"]] - post = %Post{title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"} - - {:ok, inserted} = - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: [:uuid] - ) - - assert inserted.id - - # Error on non-conflict target - assert catch_error( - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: [:id] - ) - ) - - {:ok, updated} = - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: [:uuid] - ) - - assert updated.id == inserted.id - assert updated.title != "second" - assert TestRepo.get!(Post, inserted.id).title == "second" - end - - @tag :returning - @tag :with_conflict_target - test "on conflict keyword list and conflict target and returning" do - {:ok, c1} = TestRepo.insert(%Post{}) - - {:ok, c2} = - TestRepo.insert( - %Post{id: c1.id}, - on_conflict: [set: [id: c1.id]], - conflict_target: [:id], - returning: [:id, :uuid] - ) - - {:ok, c3} = - TestRepo.insert( - %Post{id: c1.id}, - on_conflict: [set: [id: c1.id]], - conflict_target: [:id], - returning: true - ) - - {:ok, c4} = - TestRepo.insert( - %Post{id: c1.id}, - on_conflict: [set: [id: c1.id]], - conflict_target: [:id], - returning: false - ) - - assert c2.uuid == c1.uuid - assert c3.uuid == c1.uuid - assert c4.uuid != c1.uuid - end - - @tag :with_conflict_target - @tag :with_conflict_target_on_constraint - test "on conflict keyword list and conflict target on constraint" do - on_conflict = [set: [title: "new"]] - post = %Post{title: "old"} - - {:ok, inserted} = - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: {:constraint, 
:posts_pkey} - ) - - assert inserted.id - - {:ok, updated} = - TestRepo.insert( - %{post | id: inserted.id}, - on_conflict: on_conflict, - conflict_target: {:constraint, :posts_pkey} - ) - - assert updated.id == inserted.id - assert updated.title != "new" - assert TestRepo.get!(Post, inserted.id).title == "new" - end - - @tag :returning - @tag :with_conflict_target - test "on conflict keyword list and conflict target and returning and field source" do - TestRepo.insert!(%Permalink{url: "old"}) - - {:ok, c1} = - TestRepo.insert( - %Permalink{url: "old"}, - on_conflict: [set: [url: "new1"]], - conflict_target: [:url], - returning: [:url] - ) - - TestRepo.insert!(%Permalink{url: "old"}) - - {:ok, c2} = - TestRepo.insert( - %Permalink{url: "old"}, - on_conflict: [set: [url: "new2"]], - conflict_target: [:url], - returning: true - ) - - assert c1.url == "new1" - assert c2.url == "new2" - end - - @tag :returning - @tag :with_conflict_target - test "on conflict ignore and returning" do - post = %Post{title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"} - - {:ok, inserted} = - TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid]) - - assert inserted.id - - {:ok, not_inserted} = - TestRepo.insert( - post, - on_conflict: :nothing, - conflict_target: [:uuid], - returning: true - ) - - assert not_inserted.id == nil - end - - @tag :without_conflict_target - test "on conflict query" do - on_conflict = from(Post, update: [set: [title: "second"]]) - post = %Post{title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"} - {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict) - assert inserted.id - - {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict) - assert updated.id == inserted.id - assert updated.title != "second" - assert TestRepo.get!(Post, inserted.id).title == "second" - end - - @tag :with_conflict_target - test "on conflict query and conflict target" do - on_conflict = from(Post, update: [set: [title: "second"]]) - post = %Post{title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"} - - {:ok, inserted} = - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: [:uuid] - ) - - assert inserted.id - - # Error on non-conflict target - assert catch_error( - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: [:id] - ) - ) - - {:ok, updated} = - TestRepo.insert( - post, - on_conflict: on_conflict, - conflict_target: [:uuid] - ) - - assert updated.id == inserted.id - assert updated.title != "second" - assert TestRepo.get!(Post, inserted.id).title == "second" - end - - @tag :without_conflict_target - test "on conflict replace_all" do - post = %Post{ - title: "first", - text: "text", - uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e" - } - - {:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all) - assert inserted.id - - # Error on non-conflict target - post = %Post{ - id: inserted.id, - title: "updated", - text: "updated", - uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e" - } - - # Error on conflict target - post = TestRepo.insert!(post, on_conflict: :replace_all) - assert post.title == "updated" - assert post.text == "updated" - - assert TestRepo.all(from(p in Post, select: p.title)) == ["updated"] - assert TestRepo.all(from(p in Post, select: p.text)) == ["updated"] - assert TestRepo.all(from(p in Post, select: count(p.id))) == [1] - end - - @tag :with_conflict_target - test "on conflict replace_all and conflict target" do - post = %Post{ - title: "first", - text: "text", - uuid: 
"6fa459ea-ee8a-3ca4-894e-db77e160355e" - } - - {:ok, inserted} = - TestRepo.insert(post, on_conflict: :replace_all, conflict_target: :id) - - assert inserted.id - - # Error on non-conflict target - post = %Post{ - id: inserted.id, - title: "updated", - text: "updated", - uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e" - } - - # Error on conflict target - post = - TestRepo.insert!(post, on_conflict: :replace_all, conflict_target: :id) - - assert post.title == "updated" - assert post.text == "updated" - - assert TestRepo.all(from(p in Post, select: p.title)) == ["updated"] - assert TestRepo.all(from(p in Post, select: p.text)) == ["updated"] - assert TestRepo.all(from(p in Post, select: count(p.id))) == [1] - end - end - - describe "upsert via insert_all" do - @describetag :upsert_all - - test "on conflict raise" do - post = [title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"] - {1, nil} = TestRepo.insert_all(Post, [post], on_conflict: :raise) - assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :raise)) - end - - test "on conflict ignore" do - post = [title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"] - - assert TestRepo.insert_all(Post, [post], on_conflict: :nothing) == - {1, nil} - - # PG returns 0, MySQL returns 1 - {entries, nil} = TestRepo.insert_all(Post, [post], on_conflict: :nothing) - assert entries == 0 or entries == 1 - - assert length(TestRepo.all(Post)) == 1 - end - - @tag :with_conflict_target - test "on conflict ignore and conflict target" do - post = [title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"] - - assert TestRepo.insert_all( - Post, - [post], - on_conflict: :nothing, - conflict_target: [:uuid] - ) == {1, nil} - - # Error on non-conflict target - assert catch_error( - TestRepo.insert_all( - Post, - [post], - on_conflict: :nothing, - conflict_target: [:id] - ) - ) - - # Error on conflict target - assert TestRepo.insert_all( - Post, - [post], - on_conflict: :nothing, - conflict_target: [:uuid] - ) == {0, nil} - end - - @tag :with_conflict_target - test "on conflict keyword list and conflict target" do - on_conflict = [set: [title: "second"]] - post = [title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"] - - {1, nil} = - TestRepo.insert_all( - Post, - [post], - on_conflict: on_conflict, - conflict_target: [:uuid] - ) - - # Error on non-conflict target - assert catch_error( - TestRepo.insert_all( - Post, - [post], - on_conflict: on_conflict, - conflict_target: [:id] - ) - ) - - # Error on conflict target - assert TestRepo.insert_all( - Post, - [post], - on_conflict: on_conflict, - conflict_target: [:uuid] - ) == {1, nil} - - assert TestRepo.all(from(p in Post, select: p.title)) == ["second"] - end - - @tag :with_conflict_target - @tag :returning - test "on conflict keyword list and conflict target and returning and source field" do - on_conflict = [set: [url: "new"]] - permalink = [url: "old"] - - assert {1, [%Permalink{url: "old"}]} = - TestRepo.insert_all( - Permalink, - [permalink], - on_conflict: on_conflict, - conflict_target: [:url], - returning: [:url] - ) - - assert {1, [%Permalink{url: "new"}]} = - TestRepo.insert_all( - Permalink, - [permalink], - on_conflict: on_conflict, - conflict_target: [:url], - returning: [:url] - ) - end - - @tag :with_conflict_target - test "on conflict query and conflict target" do - on_conflict = from(Post, update: [set: [title: "second"]]) - post = [title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"] - - assert TestRepo.insert_all( - Post, - [post], - on_conflict: 
-               conflict_target: [:uuid]
-             ) == {1, nil}
-
-      # Error on non-conflict target
-      assert catch_error(
-               TestRepo.insert_all(
-                 Post,
-                 [post],
-                 on_conflict: on_conflict,
-                 conflict_target: [:id]
-               )
-             )
-
-      # Error on conflict target
-      assert TestRepo.insert_all(
-               Post,
-               [post],
-               on_conflict: on_conflict,
-               conflict_target: [:uuid]
-             ) == {1, nil}
-
-      assert TestRepo.all(from(p in Post, select: p.title)) == ["second"]
-    end
-
-    @tag :returning
-    @tag :with_conflict_target
-    test "on conflict query and conflict target and returning" do
-      on_conflict = from(Post, update: [set: [title: "second"]])
-      post = [title: "first", uuid: "6fa459ea-ee8a-3ca4-894e-db77e160355e"]
-
-      {1, [%{id: id}]} =
-        TestRepo.insert_all(
-          Post,
-          [post],
-          on_conflict: on_conflict,
-          conflict_target: [:uuid],
-          returning: [:id]
-        )
-
-      # Error on non-conflict target
-      assert catch_error(
-               TestRepo.insert_all(
-                 Post,
-                 [post],
-                 on_conflict: on_conflict,
-                 conflict_target: [:id],
-                 returning: [:id]
-               )
-             )
-
-      # Error on conflict target
-      {1, [%Post{id: ^id, title: "second"}]} =
-        TestRepo.insert_all(
-          Post,
-          [post],
-          on_conflict: on_conflict,
-          conflict_target: [:uuid],
-          returning: [:id, :title]
-        )
-    end
-
-    @tag :with_conflict_target
-    test "source (without an ecto schema) on conflict query and conflict target" do
-      on_conflict = [set: [title: "second"]]
-      {:ok, uuid} = Ecto.UUID.dump("6fa459ea-ee8a-3ca4-894e-db77e160355e")
-      post = [title: "first", uuid: uuid]
-
-      assert TestRepo.insert_all(
-               "posts",
-               [post],
-               on_conflict: on_conflict,
-               conflict_target: [:uuid]
-             ) == {1, nil}
-
-      # Error on non-conflict target
-      assert catch_error(
-               TestRepo.insert_all(
-                 "posts",
-                 [post],
-                 on_conflict: on_conflict,
-                 conflict_target: [:id]
-               )
-             )
-
-      # Error on conflict target
-      assert TestRepo.insert_all(
-               "posts",
-               [post],
-               on_conflict: on_conflict,
-               conflict_target: [:uuid]
-             ) == {1, nil}
-
-      assert TestRepo.all(from(p in Post, select: p.title)) == ["second"]
-    end
-
-    @tag :without_conflict_target
-    test "on conflict replace_all" do
-      post_first = %Post{title: "first", public: true}
-      post_second = %Post{title: "second", public: false}
-
-      {:ok, inserted_first} =
-        TestRepo.insert(post_first, on_conflict: :replace_all)
-
-      {:ok, inserted_second} =
-        TestRepo.insert(post_second, on_conflict: :replace_all)
-
-      assert inserted_first.id
-      assert inserted_second.id
-      assert TestRepo.all(from(p in Post, select: count(p.id))) == [2]
-
-      # multiple record change value
-      changes = [
-        %{id: inserted_first.id, title: "first_updated", text: "first_updated"},
-        %{
-          id: inserted_second.id,
-          title: "second_updated",
-          text: "second_updated"
-        }
-      ]
-
-      TestRepo.insert_all(Post, changes, on_conflict: :replace_all)
-
-      assert TestRepo.all(from(p in Post, select: count(p.id))) == [2]
-
-      updated_first = TestRepo.get(Post, inserted_first.id)
-      assert updated_first.title == "first_updated"
-      assert updated_first.text == "first_updated"
-
-      updated_first = TestRepo.get(Post, inserted_second.id)
-      assert updated_first.title == "second_updated"
-      assert updated_first.text == "second_updated"
-    end
-
-    @tag :with_conflict_target
-    test "on conflict replace_all and conflict_target" do
-      post_first = %Post{title: "first", public: true}
-      post_second = %Post{title: "second", public: false}
-
-      {:ok, inserted_first} =
-        TestRepo.insert(
-          post_first,
-          on_conflict: :replace_all,
-          conflict_target: :id
-        )
-
-      {:ok, inserted_second} =
-        TestRepo.insert(
-          post_second,
-          on_conflict: :replace_all,
-          conflict_target: :id
-        )
-
-      assert inserted_first.id
-      assert inserted_second.id
-      assert TestRepo.all(from(p in Post, select: count(p.id))) == [2]
-
-      # multiple record change value
-      changes = [
-        %{id: inserted_first.id, title: "first_updated", text: "first_updated"},
-        %{
-          id: inserted_second.id,
-          title: "second_updated",
-          text: "second_updated"
-        }
-      ]
-
-      TestRepo.insert_all(
-        Post,
-        changes,
-        on_conflict: :replace_all,
-        conflict_target: :id
-      )
-
-      assert TestRepo.all(from(p in Post, select: count(p.id))) == [2]
-
-      updated_first = TestRepo.get(Post, inserted_first.id)
-      assert updated_first.title == "first_updated"
-      assert updated_first.text == "first_updated"
-
-      updated_first = TestRepo.get(Post, inserted_second.id)
-      assert updated_first.title == "second_updated"
-      assert updated_first.text == "second_updated"
-    end
-  end
-end
diff --git a/integration/mssql/cases/type.exs b/integration/mssql/cases/type.exs
deleted file mode 100644
index da9196f..0000000
--- a/integration/mssql/cases/type.exs
+++ /dev/null
@@ -1,443 +0,0 @@
-Code.require_file("../support/types.exs", __DIR__)
-
-defmodule Ecto.Integration.TypeTest do
-  use Ecto.Integration.Case,
-    async: Application.get_env(:ecto, :async_integration_tests, true)
-
-  alias Ecto.Integration.{Custom, Item, Order, Post, User, Tag}
-  alias Ecto.Integration.TestRepo
-  import Ecto.Query
-
-  test "primitive types" do
-    integer = 1
-    float = 0.1
-    text = <<0, 1>>
-    uuid = "00010203-0405-4607-8809-0a0b0c0d0e0f"
-    datetime = ~N[2014-01-16 20:26:51.000000]
-
-    TestRepo.insert!(%Post{
-      text: text,
-      public: true,
-      visits: integer,
-      uuid: uuid,
-      counter: integer,
-      inserted_at: datetime,
-      intensity: float
-    })
-
-    # nil
-    assert [nil] = TestRepo.all(from(Post, select: nil))
-
-    # ID
-    assert [1] =
-             TestRepo.all(
-               from(p in Post, where: p.counter == ^integer, select: p.counter)
-             )
-
-    # Integers
-    assert [1] =
-             TestRepo.all(
-               from(p in Post, where: p.visits == ^integer, select: p.visits)
-             )
-
-    assert [1] =
-             TestRepo.all(
-               from(p in Post, where: p.visits == 1, select: p.visits)
-             )
-
-    # Floats
-    assert [0.1] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 where: p.intensity == ^float,
-                 select: p.intensity
-               )
-             )
-
-    assert [0.1] =
-             TestRepo.all(
-               from(p in Post, where: p.intensity == 0.1, select: p.intensity)
-             )
-
-    assert [1500.0] = TestRepo.all(from(p in Post, select: 1500.0))
-
-    # Booleans
-    assert [true] =
-             TestRepo.all(
-               from(p in Post, where: p.public == ^true, select: p.public)
-             )
-
-    assert [true] =
-             TestRepo.all(
-               from(p in Post, where: p.public == true, select: p.public)
-             )
-
-    # Binaries
-    # assert [^text] =
-    #          TestRepo.all(
-    #            from(p in Post, where: p.text == <<0, 1>>, select: p.text)
-    #          )
-
-    # assert [^text] =
-    #          TestRepo.all(
-    #            from(p in Post, where: p.text == ^text, select: p.text)
-    #          )
-
-    # UUID
-    assert [^uuid] =
-             TestRepo.all(
-               from(p in Post, where: p.uuid == ^uuid, select: p.uuid)
-             )
-
-    # NaiveDatetime
-    assert [^datetime] =
-             TestRepo.all(
-               from(
-                 p in Post,
-                 where: p.inserted_at == ^datetime,
-                 select: p.inserted_at
-               )
-             )
-
-    # Datetime
-    datetime =
-      (System.system_time(:seconds) * 1_000_000)
-      |> DateTime.from_unix!(:microseconds)
-
-    TestRepo.insert!(%User{inserted_at: datetime})
-
-    assert [^datetime] =
-             TestRepo.all(
-               from(
-                 u in User,
-                 where: u.inserted_at == ^datetime,
-                 select: u.inserted_at
-               )
-             )
-  end
-
-  test "aggregate types" do
-    datetime = ~N[2014-01-16 20:26:51.000000]
-    TestRepo.insert!(%Post{inserted_at: datetime})
-    query = from(p in Post, select: max(p.inserted_at))
-    assert [^datetime] = TestRepo.all(query)
-  end
-
-  test "tagged types" do
-    TestRepo.insert!(%Post{})
-
-    # Numbers
-    assert [1] = TestRepo.all(from(Post, select: type(^"1", :integer)))
-    assert [1.0] = TestRepo.all(from(Post, select: type(^1.0, :float)))
-    assert [1] = TestRepo.all(from(p in Post, select: type(^"1", p.visits)))
-
-    assert [1.0] =
-             TestRepo.all(from(p in Post, select: type(^"1", p.intensity)))
-
-    # Custom wrappers
-    assert [1] =
-             TestRepo.all(
-               from(Post, select: type(^"1", Elixir.Custom.Permalink))
-             )
-
-    # Custom types
-    uuid = Ecto.UUID.generate()
-    assert [^uuid] = TestRepo.all(from(Post, select: type(^uuid, Ecto.UUID)))
-  end
-
-  test "binary id type" do
-    assert %Custom{} = custom = TestRepo.insert!(%Custom{})
-    bid = custom.bid
-    assert [^bid] = TestRepo.all(from(c in Custom, select: c.bid))
-
-    assert [^bid] =
-             TestRepo.all(from(c in Custom, select: type(^bid, :binary_id)))
-  end
-
-  @tag :array_type
-  test "array type" do
-    ints = [1, 2, 3]
-    tag = TestRepo.insert!(%Tag{ints: ints})
-
-    assert TestRepo.all(from(t in Tag, where: t.ints == ^[], select: t.ints)) ==
-             []
-
-    assert TestRepo.all(
-             from(t in Tag, where: t.ints == ^[1, 2, 3], select: t.ints)
-           ) == [ints]
-
-    # Both sides interpolation
-    assert TestRepo.all(
-             from(t in Tag, where: ^"b" in ^["a", "b", "c"], select: t.ints)
-           ) == [ints]
-
-    assert TestRepo.all(
-             from(t in Tag, where: ^"b" in [^"a", ^"b", ^"c"], select: t.ints)
-           ) == [ints]
-
-    # Querying
-    assert TestRepo.all(
-             from(t in Tag, where: t.ints == [1, 2, 3], select: t.ints)
-           ) == [ints]
-
-    assert TestRepo.all(from(t in Tag, where: 0 in t.ints, select: t.ints)) ==
-             []
-
-    assert TestRepo.all(from(t in Tag, where: 1 in t.ints, select: t.ints)) == [
-             ints
-           ]
-
-    # Update
-    tag = TestRepo.update!(Ecto.Changeset.change(tag, ints: [3, 2, 1]))
-    assert TestRepo.get!(Tag, tag.id).ints == [3, 2, 1]
-
-    # Update all
-    {1, _} = TestRepo.update_all(Tag, push: [ints: 0])
-    assert TestRepo.get!(Tag, tag.id).ints == [3, 2, 1, 0]
-
-    {1, _} = TestRepo.update_all(Tag, pull: [ints: 2])
-    assert TestRepo.get!(Tag, tag.id).ints == [3, 1, 0]
-  end
-
-  @tag :array_type
-  test "array type with custom types" do
-    uuids = ["51fcfbdd-ad60-4ccb-8bf9-47aabd66d075"]
-    TestRepo.insert!(%Tag{uuids: ["51fcfbdd-ad60-4ccb-8bf9-47aabd66d075"]})
-
-    assert TestRepo.all(from(t in Tag, where: t.uuids == ^[], select: t.uuids)) ==
-             []
-
-    assert TestRepo.all(
-             from(
-               t in Tag,
-               where: t.uuids == ^["51fcfbdd-ad60-4ccb-8bf9-47aabd66d075"],
-               select: t.uuids
-             )
-           ) == [uuids]
-  end
-
-  @tag :array_type
-  test "array type with nil in array" do
-    tag = TestRepo.insert!(%Tag{ints: [1, nil, 3]})
-    assert tag.ints == [1, nil, 3]
-  end
-
-  @tag :map_type
-  test "untyped map" do
-    post1 = TestRepo.insert!(%Post{meta: %{"foo" => "bar", "baz" => "bat"}})
-    post2 = TestRepo.insert!(%Post{meta: %{foo: "bar", baz: "bat"}})
-
-    assert TestRepo.all(
-             from(p in Post, where: p.id == ^post1.id, select: p.meta)
-           ) == [%{"foo" => "bar", "baz" => "bat"}]
-
-    assert TestRepo.all(
-             from(p in Post, where: p.id == ^post2.id, select: p.meta)
-           ) == [%{"foo" => "bar", "baz" => "bat"}]
-  end
-
-  @tag :map_type
-  test "typed map" do
-    post1 =
-      TestRepo.insert!(%Post{
-        links: %{"foo" => "http://foo.com", "bar" => "http://bar.com"}
-      })
-
-    post2 =
-      TestRepo.insert!(%Post{
-        links: %{foo: "http://foo.com", bar: "http://bar.com"}
-      })
-
-    assert TestRepo.all(
-             from(p in Post, where: p.id == ^post1.id, select: p.links)
-           ) == [%{"foo" => "http://foo.com", "bar" => "http://bar.com"}]
-
-    assert TestRepo.all(
-             from(p in Post, where: p.id == ^post2.id, select: p.links)
-           ) == [%{"foo" => "http://foo.com", "bar" => "http://bar.com"}]
-  end
-
-  @tag :map_type
-  test "map type on update" do
-    post = TestRepo.insert!(%Post{meta: %{"world" => "hello"}})
-    assert TestRepo.get!(Post, post.id).meta == %{"world" => "hello"}
-
-    post =
-      TestRepo.update!(Ecto.Changeset.change(post, meta: %{hello: "world"}))
-
-    assert TestRepo.get!(Post, post.id).meta == %{"hello" => "world"}
-
-    query = from(p in Post, where: p.id == ^post.id)
-    TestRepo.update_all(query, set: [meta: %{world: "hello"}])
-    assert TestRepo.get!(Post, post.id).meta == %{"world" => "hello"}
-  end
-
-  @tag :map_type
-  test "embeds one" do
-    item = %Item{price: 123, valid_at: ~D[2014-01-16]}
-
-    order =
-      %Order{}
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_embed(:item, item)
-
-    order = TestRepo.insert!(order)
-    dbitem = TestRepo.get!(Order, order.id).item
-    assert item.price == dbitem.price
-    assert item.valid_at == dbitem.valid_at
-    assert dbitem.id
-
-    [dbitem] = TestRepo.all(from(o in Order, select: o.item))
-    assert item.price == dbitem.price
-    assert item.valid_at == dbitem.valid_at
-    assert dbitem.id
-
-    {1, _} = TestRepo.update_all(Order, set: [item: %{dbitem | price: 456}])
-    assert TestRepo.get!(Order, order.id).item.price == 456
-  end
-
-  @tag :map_type
-  @tag :array_type
-  test "embeds many" do
-    item = %Item{price: 123, valid_at: ~D[2014-01-16]}
-
-    tag =
-      %Tag{}
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_embed(:items, [item])
-
-    tag = TestRepo.insert!(tag)
-
-    [dbitem] = TestRepo.get!(Tag, tag.id).items
-    assert item.price == dbitem.price
-    assert item.valid_at == dbitem.valid_at
-    assert dbitem.id
-
-    [[dbitem]] = TestRepo.all(from(t in Tag, select: t.items))
-    assert item.price == dbitem.price
-    assert item.valid_at == dbitem.valid_at
-    assert dbitem.id
-
-    {1, _} = TestRepo.update_all(Tag, set: [items: [%{dbitem | price: 456}]])
-    assert (TestRepo.get!(Tag, tag.id).items |> hd).price == 456
-  end
-
-  @tag :decimal_type
-  test "decimal type" do
-    decimal = Decimal.new("1.0")
-    TestRepo.insert!(%Post{cost: decimal})
-
-    assert [^decimal] =
-             TestRepo.all(
-               from(p in Post, where: p.cost == ^decimal, select: p.cost)
-             )
-
-    assert [^decimal] =
-             TestRepo.all(
-               from(p in Post, where: p.cost == ^1.0, select: p.cost)
-             )
-
-    assert [^decimal] =
-             TestRepo.all(from(p in Post, where: p.cost == ^1, select: p.cost))
-
-    assert [^decimal] =
-             TestRepo.all(from(p in Post, where: p.cost == 1.0, select: p.cost))
-
-    assert [^decimal] =
-             TestRepo.all(from(p in Post, where: p.cost == 1, select: p.cost))
-  end
-
-  @tag :decimal_type
-  test "typed aggregations" do
-    decimal = Decimal.new("1.0")
-    TestRepo.insert!(%Post{cost: decimal})
-
-    assert [1] =
-             TestRepo.all(from(p in Post, select: type(sum(p.cost), :integer)))
-
-    assert [1.0] =
-             TestRepo.all(from(p in Post, select: type(sum(p.cost), :float)))
-
-    # assert [^decimal] =
-    #          TestRepo.all(from(p in Post, select: type(sum(p.cost), :decimal)))
-  end
-
-  test "schemaless types" do
-    TestRepo.insert!(%Post{visits: 123})
-
-    assert [123] =
-             TestRepo.all(from(p in "posts", select: type(p.visits, :integer)))
-  end
-
-  test "schemaless calendar types" do
-    datetime = ~N[2014-01-16 20:26:51]
-    assert {1, _} = TestRepo.insert_all("posts", [[inserted_at: datetime]])
-    assert {1, _} = TestRepo.update_all("posts", set: [inserted_at: datetime])
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 p in "posts",
-                 where: p.inserted_at >= ^datetime,
-                 select: p.inserted_at
-               )
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 p in "posts",
-                 where: p.inserted_at in [^datetime],
-                 select: p.inserted_at
-               )
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 p in "posts",
-                 where: p.inserted_at in ^[datetime],
-                 select: p.inserted_at
-               )
-             )
-
-    datetime =
-      (System.system_time(:seconds) * 1_000_000)
-      |> DateTime.from_unix!(:microseconds)
-
-    assert {1, _} =
-             TestRepo.insert_all("users", [
-               [inserted_at: datetime, updated_at: datetime]
-             ])
-
-    assert {1, _} = TestRepo.update_all("users", set: [inserted_at: datetime])
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 u in "users",
-                 where: u.inserted_at >= ^datetime,
-                 select: u.updated_at
-               )
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 u in "users",
-                 where: u.inserted_at in [^datetime],
-                 select: u.updated_at
-               )
-             )
-
-    assert [_] =
-             TestRepo.all(
-               from(
-                 u in "users",
-                 where: u.inserted_at in ^[datetime],
-                 select: u.updated_at
-               )
-             )
-  end
-end
diff --git a/integration/mssql/cases/assoc.exs b/integration/mssql/ecto/cases/assoc.exs
similarity index 77%
rename from integration/mssql/cases/assoc.exs
rename to integration/mssql/ecto/cases/assoc.exs
index 9a379f8..f9ded3e 100644
--- a/integration/mssql/cases/assoc.exs
+++ b/integration/mssql/ecto/cases/assoc.exs
@@ -1,8 +1,7 @@
-Code.require_file("../support/types.exs", __DIR__)
+Code.require_file "../support/types.exs", __DIR__

 defmodule Ecto.Integration.AssocTest do
-  use Ecto.Integration.Case,
-    async: Application.get_env(:ecto, :async_integration_tests, true)
+  use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true)

   alias Ecto.Integration.TestRepo
   import Ecto.Query
@@ -22,11 +21,11 @@ defmodule Ecto.Integration.AssocTest do
     %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id})
     %Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: p2.id})

-    [c1, c2] = TestRepo.all(Ecto.assoc(p1, :comments))
+    [c1, c2] = TestRepo.all Ecto.assoc(p1, :comments)
     assert c1.id == cid1
     assert c2.id == cid2

-    [c1, c2, c3] = TestRepo.all(Ecto.assoc([p1, p2], :comments))
+    [c1, c2, c3] = TestRepo.all Ecto.assoc([p1, p2], :comments)
     assert c1.id == cid1
     assert c2.id == cid2
     assert c3.id == cid3
@@ -36,15 +35,11 @@ defmodule Ecto.Integration.AssocTest do
     p1 = TestRepo.insert!(%Post{title: "1"})
     p2 = TestRepo.insert!(%Post{title: "2"})

-    %Permalink{id: lid1} =
-      TestRepo.insert!(%Permalink{url: "1", post_id: p1.id})
+    %Permalink{id: lid1} = TestRepo.insert!(%Permalink{url: "1", post_id: p1.id})
+    %Permalink{} = TestRepo.insert!(%Permalink{url: "2"})
+    %Permalink{id: lid3} = TestRepo.insert!(%Permalink{url: "3", post_id: p2.id})

-    %Permalink{} = TestRepo.insert!(%Permalink{url: "2"})
-
-    %Permalink{id: lid3} =
-      TestRepo.insert!(%Permalink{url: "3", post_id: p2.id})
-
-    [l1, l3] = TestRepo.all(Ecto.assoc([p1, p2], :permalink))
+    [l1, l3] = TestRepo.all Ecto.assoc([p1, p2], :permalink)
     assert l1.id == lid1
     assert l3.id == lid3
   end
@@ -57,7 +52,7 @@ defmodule Ecto.Integration.AssocTest do
     l2 = TestRepo.insert!(%Permalink{url: "2"})
     l3 = TestRepo.insert!(%Permalink{url: "3", post_id: pid2})

-    assert [p1, p2] = TestRepo.all(Ecto.assoc([l1, l2, l3], :post))
+    assert [p1, p2] = TestRepo.all Ecto.assoc([l1, l2, l3], :post)
     assert p1.id == pid1
     assert p2.id == pid2
   end
@@ -97,22 +92,17 @@ defmodule Ecto.Integration.AssocTest do
     %Comment{} = TestRepo.insert!(%Comment{post_id: p1.id, author_id: u2.id})
     %Comment{} = TestRepo.insert!(%Comment{post_id: p2.id, author_id: u2.id})

-    query =
-      Ecto.assoc([p1, p2], :comments_authors_permalinks) |> order_by([p], p.url)
-
+    query = Ecto.assoc([p1, p2], :comments_authors_permalinks) |> order_by([p], p.url)
     assert [^pl2, ^pl1] = TestRepo.all(query)

     # Dynamic through
-    query =
-      Ecto.assoc([p1, p2], [:comments, :author, :permalink])
-      |> order_by([p], p.url)
-
+    query = Ecto.assoc([p1, p2], [:comments, :author, :permalink]) |> order_by([p], p.url)
     assert [^pl2, ^pl1] = TestRepo.all(query)
   end

   test "has_many through-through assoc trailing" do
-    p1 = TestRepo.insert!(%Post{})
-    u1 = TestRepo.insert!(%User{})
+    p1  = TestRepo.insert!(%Post{})
+    u1  = TestRepo.insert!(%User{})
     pl1 = TestRepo.insert!(%Permalink{user_id: u1.id, post_id: p1.id})

     %Comment{} = TestRepo.insert!(%Comment{post_id: p1.id, author_id: u1.id})
@@ -133,21 +123,19 @@ defmodule Ecto.Integration.AssocTest do
     %User{id: uid1} = TestRepo.insert!(%User{name: "john"})
     %User{id: uid2} = TestRepo.insert!(%User{name: "mary"})

-    TestRepo.insert_all("posts_users", [
-      [post_id: p1.id, user_id: uid1],
-      [post_id: p1.id, user_id: uid2],
-      [post_id: p2.id, user_id: uid2]
-    ])
+    TestRepo.insert_all "posts_users", [[post_id: p1.id, user_id: uid1],
+                                        [post_id: p1.id, user_id: uid2],
+                                        [post_id: p2.id, user_id: uid2]]

-    [u1, u2] = TestRepo.all(Ecto.assoc([p1], :users))
+    [u1, u2] = TestRepo.all Ecto.assoc([p1], :users)
     assert u1.id == uid1
     assert u2.id == uid2

-    [u2] = TestRepo.all(Ecto.assoc([p2], :users))
+    [u2] = TestRepo.all Ecto.assoc([p2], :users)
     assert u2.id == uid2
-    [] = TestRepo.all(Ecto.assoc([p3], :users))
+    [] = TestRepo.all Ecto.assoc([p3], :users)

-    [u1, u2, u2] = TestRepo.all(Ecto.assoc([p1, p2, p3], :users))
+    [u1, u2, u2] = TestRepo.all Ecto.assoc([p1, p2, p3], :users)
     assert u1.id == uid1
     assert u2.id == uid2
   end
@@ -158,9 +146,8 @@ defmodule Ecto.Integration.AssocTest do
     # Insert new
     changeset =
       %Post{title: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:permalink, %Permalink{url: "1"})
-
     post = TestRepo.insert!(changeset)
     assert post.permalink.id
     assert post.permalink.post_id == post.id
@@ -171,9 +158,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replace with new
     changeset =
       post
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:permalink, %Permalink{url: "2"})
-
     post = TestRepo.update!(changeset)
     assert post.permalink.id
     assert post.permalink.post_id == post.id
@@ -183,12 +169,10 @@ defmodule Ecto.Integration.AssocTest do
     # Replacing with existing
     existing = TestRepo.insert!(%Permalink{url: "3"})
-
     changeset =
       post
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:permalink, existing)
-
     post = TestRepo.update!(changeset)
     assert post.permalink.id
     assert post.permalink.post_id == post.id
@@ -199,9 +183,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replacing with nil (on_replace: :delete)
     changeset =
       post
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:permalink, nil)
-
     post = TestRepo.update!(changeset)
     refute post.permalink
     post = TestRepo.get!(from(Post, preload: [:permalink]), post.id)
@@ -214,9 +197,8 @@ defmodule Ecto.Integration.AssocTest do
     # Insert new
     changeset =
       %User{name: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:permalink, %Permalink{url: "1"})
-
     user = TestRepo.insert!(changeset)
     assert user.permalink.id
     assert user.permalink.user_id == user.id
@@ -227,9 +209,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replace with new
     changeset =
       user
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:permalink, %Permalink{url: "2"})
-
     user = TestRepo.update!(changeset)
     assert user.permalink.id
     assert user.permalink.user_id == user.id
@@ -240,9 +221,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replacing with nil (on_replace: :nilify)
     changeset =
       user
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:permalink, nil)
-
     user = TestRepo.update!(changeset)
     refute user.permalink
     user = TestRepo.get!(from(User, preload: [:permalink]), user.id)
@@ -255,9 +235,8 @@ defmodule Ecto.Integration.AssocTest do
     # Insert new
     changeset =
       %Post{title: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:update_permalink, %Permalink{url: "1"})
-
     post = TestRepo.insert!(changeset)
     assert post.update_permalink.id
     assert post.update_permalink.post_id == post.id
@@ -272,7 +251,6 @@ defmodule Ecto.Integration.AssocTest do
       post
       |> Ecto.Changeset.change()
       |> Ecto.Changeset.put_assoc(:update_permalink, %{url: "2"})
-
     post = TestRepo.update!(changeset)
     assert post.update_permalink.id == perma.id
     assert post.update_permalink.post_id == post.id
@@ -285,7 +263,6 @@ defmodule Ecto.Integration.AssocTest do
       post
       |> Ecto.Changeset.cast(%{update_permalink: %{url: "3"}}, [])
       |> Ecto.Changeset.cast_assoc(:update_permalink)
-
     post = TestRepo.update!(changeset)
     assert post.update_permalink.id == perma.id
     assert post.update_permalink.post_id == post.id
@@ -294,34 +271,24 @@ defmodule Ecto.Integration.AssocTest do
     assert post.update_permalink.url == "3"

     # Replace with new struct
-    assert_raise RuntimeError,
-                 ~r"you are only allowed\sto update the existing entry",
-                 fn ->
-                   post
-                   |> Ecto.Changeset.change()
-                   |> Ecto.Changeset.put_assoc(:update_permalink, %Permalink{
-                     url: "4"
-                   })
-                 end
+    assert_raise RuntimeError, ~r"you are only allowed\sto update the existing entry", fn ->
+      post
+      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.put_assoc(:update_permalink, %Permalink{url: "4"})
+    end

     # Replace with existing struct
-    assert_raise RuntimeError,
-                 ~r"you are only allowed\sto update the existing entry",
-                 fn ->
-                   post
-                   |> Ecto.Changeset.change()
-                   |> Ecto.Changeset.put_assoc(
-                     :update_permalink,
-                     TestRepo.insert!(%Permalink{url: "5"})
-                   )
-                 end
+    assert_raise RuntimeError, ~r"you are only allowed\sto update the existing entry", fn ->
+      post
+      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.put_assoc(:update_permalink, TestRepo.insert!(%Permalink{url: "5"}))
+    end

     # Replacing with nil (on_replace: :update)
     changeset =
       post
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:update_permalink, nil)
-
     post = TestRepo.update!(changeset)
     refute post.update_permalink
     post = TestRepo.get!(from(Post, preload: [:update_permalink]), post.id)
@@ -331,15 +298,14 @@ defmodule Ecto.Integration.AssocTest do
   end

   test "has_many changeset assoc (on_replace: :delete)" do
-    c1 = TestRepo.insert!(%Comment{text: "1"})
+    c1 = TestRepo.insert! %Comment{text: "1"}
     c2 = %Comment{text: "2"}

     # Inserting
     changeset =
       %Post{title: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:comments, [c2])
-
     post = TestRepo.insert!(changeset)
     [c2] = post.comments
     assert c2.id
@@ -351,27 +317,23 @@ defmodule Ecto.Integration.AssocTest do
     # Updating
     changeset =
       post
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_assoc(:comments, [
-        Ecto.Changeset.change(c1, text: "11"),
-        Ecto.Changeset.change(c2, text: "22")
-      ])
-
+      |> Ecto.Changeset.change
+      |> Ecto.Changeset.put_assoc(:comments, [Ecto.Changeset.change(c1, text: "11"),
+                                              Ecto.Changeset.change(c2, text: "22")])
     post = TestRepo.update!(changeset)
-    [c1, _c2] = post.comments |> Enum.sort_by(& &1.id)
+    [c1, _c2] = post.comments |> Enum.sort_by(&(&1.id))
     assert c1.id
     assert c1.post_id == post.id

     post = TestRepo.get!(from(Post, preload: [:comments]), post.id)
-    [c1, c2] = post.comments |> Enum.sort_by(& &1.id)
+    [c1, c2] = post.comments |> Enum.sort_by(&(&1.id))
     assert c1.text == "11"
     assert c2.text == "22"

     # Replacing (on_replace: :delete)
     changeset =
       post
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:comments, [])
-
     post = TestRepo.update!(changeset)
     assert post.comments == []
     post = TestRepo.get!(from(Post, preload: [:comments]), post.id)
@@ -381,15 +343,14 @@ defmodule Ecto.Integration.AssocTest do
   end

   test "has_many changeset assoc (on_replace: :nilify)" do
-    c1 = TestRepo.insert!(%Comment{text: "1"})
+    c1 = TestRepo.insert! %Comment{text: "1"}
     c2 = %Comment{text: "2"}

     # Inserting
     changeset =
       %User{name: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:comments, [c1, c2])
-
     user = TestRepo.insert!(changeset)
     [c1, c2] = user.comments
     assert c1.id
@@ -404,9 +365,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replacing (on_replace: :nilify)
     changeset =
       user
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:comments, [])
-
     user = TestRepo.update!(changeset)
     assert user.comments == []
     user = TestRepo.get!(from(User, preload: [:comments]), user.id)
@@ -416,15 +376,14 @@ defmodule Ecto.Integration.AssocTest do
   end

   test "many_to_many changeset assoc" do
-    u1 = TestRepo.insert!(%User{name: "1"})
+    u1 = TestRepo.insert! %User{name: "1"}
     u2 = %User{name: "2"}

     # Inserting
     changeset =
       %Post{title: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:users, [u2])
-
     post = TestRepo.insert!(changeset)
     [u2] = post.users
     assert u2.id
@@ -432,56 +391,47 @@ defmodule Ecto.Integration.AssocTest do
     [u2] = post.users
     assert u2.name == "2"

-    assert [1] ==
-             TestRepo.all(from(j in "posts_users", select: count(j.post_id)))
+    assert [1] == TestRepo.all(from(j in "posts_users", select: count(j.post_id)))

     # Updating
     changeset =
       post
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_assoc(:users, [
-        Ecto.Changeset.change(u1, name: "11"),
-        Ecto.Changeset.change(u2, name: "22")
-      ])
-
+      |> Ecto.Changeset.change
+      |> Ecto.Changeset.put_assoc(:users, [Ecto.Changeset.change(u1, name: "11"),
+                                           Ecto.Changeset.change(u2, name: "22")])
     post = TestRepo.update!(changeset)
-    [u1, _u2] = post.users |> Enum.sort_by(& &1.id)
+    [u1, _u2] = post.users |> Enum.sort_by(&(&1.id))
    assert u1.id

     post = TestRepo.get!(from(Post, preload: [:users]), post.id)
-    [u1, u2] = post.users |> Enum.sort_by(& &1.id)
+    [u1, u2] = post.users |> Enum.sort_by(&(&1.id))
     assert u1.name == "11"
     assert u2.name == "22"

-    assert [2] ==
-             TestRepo.all(from(j in "posts_users", select: count(j.post_id)))
+    assert [2] == TestRepo.all(from(j in "posts_users", select: count(j.post_id)))

     # Replacing (on_replace: :delete)
     changeset =
       post
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:users, [])
-
     post = TestRepo.update!(changeset)
     assert post.users == []
     post = TestRepo.get!(from(Post, preload: [:users]), post.id)
     assert post.users == []

-    assert [0] ==
-             TestRepo.all(from(j in "posts_users", select: count(j.post_id)))
-
+    assert [0] == TestRepo.all(from(j in "posts_users", select: count(j.post_id)))
     assert [2] == TestRepo.all(from(c in User, select: count(c.id)))
   end

   test "many_to_many changeset assoc with schema" do
-    p1 = TestRepo.insert!(%Post{title: "1"})
+    p1 = TestRepo.insert! %Post{title: "1"}
     p2 = %Post{title: "2"}

     # Inserting
     changeset =
       %User{name: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:schema_posts, [p2])
-
     user = TestRepo.insert!(changeset)
     [p2] = user.schema_posts
     assert p2.id
@@ -489,7 +439,7 @@ defmodule Ecto.Integration.AssocTest do
     [p2] = user.schema_posts
     assert p2.title == "2"

-    [up2] = TestRepo.all(PostUser) |> Enum.sort_by(& &1.id)
+    [up2] = TestRepo.all(PostUser) |> Enum.sort_by(&(&1.id))
     assert up2.post_id == p2.id
     assert up2.user_id == user.id
     assert up2.inserted_at
@@ -498,21 +448,18 @@ defmodule Ecto.Integration.AssocTest do
     # Updating
     changeset =
       user
-      |> Ecto.Changeset.change()
-      |> Ecto.Changeset.put_assoc(:schema_posts, [
-        Ecto.Changeset.change(p1, title: "11"),
-        Ecto.Changeset.change(p2, title: "22")
-      ])
-
+      |> Ecto.Changeset.change
+      |> Ecto.Changeset.put_assoc(:schema_posts, [Ecto.Changeset.change(p1, title: "11"),
+                                                  Ecto.Changeset.change(p2, title: "22")])
     user = TestRepo.update!(changeset)
-    [p1, _p2] = user.schema_posts |> Enum.sort_by(& &1.id)
+    [p1, _p2] = user.schema_posts |> Enum.sort_by(&(&1.id))
     assert p1.id

     user = TestRepo.get!(from(User, preload: [:schema_posts]), user.id)
-    [p1, p2] = user.schema_posts |> Enum.sort_by(& &1.id)
+    [p1, p2] = user.schema_posts |> Enum.sort_by(&(&1.id))
     assert p1.title == "11"
     assert p2.title == "22"

-    [_up2, up1] = TestRepo.all(PostUser) |> Enum.sort_by(& &1.id)
+    [_up2, up1] = TestRepo.all(PostUser) |> Enum.sort_by(&(&1.id))
     assert up1.post_id == p1.id
     assert up1.user_id == user.id
     assert up1.inserted_at
@@ -530,8 +477,7 @@ defmodule Ecto.Integration.AssocTest do
       custom
       |> Ecto.Changeset.change(%{})
       |> Ecto.Changeset.put_assoc(:customs, [])
-      |> TestRepo.update!()
-
+      |> TestRepo.update!
     assert [] = custom.customs

     custom = Custom |> TestRepo.get!(custom.bid) |> TestRepo.preload(:customs)
@@ -546,37 +492,29 @@ defmodule Ecto.Integration.AssocTest do
     # Asserts that `unique_constraint` for `uuid` exists
     assert_raise Ecto.ConstraintError, fn ->
-      TestRepo.insert!(%Post{
-        title: "another",
-        author_id: author.id,
-        uuid: p1.uuid
-      })
+      TestRepo.insert!(%Post{title: "another", author_id: author.id, uuid: p1.uuid})
     end

-    author = TestRepo.preload(author, [:posts])
-
-    posts_params =
-      Enum.map(author.posts, fn %Post{uuid: u} ->
-        %{uuid: u, title: "fresh"}
-      end)
+    author = TestRepo.preload author, [:posts]
+    posts_params = Enum.map author.posts, fn %Post{uuid: u} ->
+      %{uuid: u, title: "fresh"}
+    end

     # This will only work if we delete before performing inserts
     changeset =
       author
       |> Ecto.Changeset.cast(%{"posts" => posts_params}, ~w())
       |> Ecto.Changeset.cast_assoc(:posts)
-
-    author = TestRepo.update!(changeset)
-    assert Enum.map(author.posts, & &1.title) == ["fresh", "fresh"]
+    author = TestRepo.update! changeset
+    assert Enum.map(author.posts, &(&1.title)) == ["fresh", "fresh"]
   end

   test "belongs_to changeset assoc" do
     # Insert new
     changeset =
       %Permalink{url: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:post, %Post{title: "1"})
-
     perma = TestRepo.insert!(changeset)
     post = perma.post
     assert perma.post_id
@@ -586,9 +524,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replace with new
     changeset =
       perma
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:post, %Post{title: "2"})
-
     perma = TestRepo.update!(changeset)
     assert perma.post.id != post.id
     post = perma.post
@@ -598,12 +535,10 @@ defmodule Ecto.Integration.AssocTest do
     # Replace with existing
     existing = TestRepo.insert!(%Post{title: "3"})
-
     changeset =
       perma
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:post, existing)
-
     perma = TestRepo.update!(changeset)
     post = perma.post
     assert perma.post_id == post.id
@@ -613,9 +548,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replace with nil
     changeset =
       perma
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:post, nil)
-
     perma = TestRepo.update!(changeset)
     assert perma.post == nil
     assert perma.post_id == nil
@@ -625,9 +559,8 @@ defmodule Ecto.Integration.AssocTest do
     # Insert new
     changeset =
       %Permalink{url: "1"}
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:update_post, %Post{title: "1"})
-
     perma = TestRepo.insert!(changeset)
     post = perma.update_post
     assert perma.post_id
@@ -639,7 +572,6 @@ defmodule Ecto.Integration.AssocTest do
       perma
       |> Ecto.Changeset.cast(%{update_post: %{title: "2"}}, [])
       |> Ecto.Changeset.cast_assoc(:update_post)
-
     perma = TestRepo.update!(changeset)
     assert perma.update_post.id == post.id
     post = perma.update_post
@@ -650,9 +582,8 @@ defmodule Ecto.Integration.AssocTest do
     # Replace with nil
     changeset =
       perma
-      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.change
       |> Ecto.Changeset.put_assoc(:update_post, nil)
-
     perma = TestRepo.update!(changeset)
     assert perma.update_post == nil
     assert perma.post_id == nil
@@ -665,7 +596,7 @@ defmodule Ecto.Integration.AssocTest do
         title: "belongs_to",
         comments: [
           %Comment{text: "child 1"},
-          %Comment{text: "child 2"}
+          %Comment{text: "child 2"},
         ]
       }
     }
@@ -691,12 +622,11 @@ defmodule Ecto.Integration.AssocTest do
     assert post.comments == []
   end

-  test "inserting changeset with empty associations" do
+  test "inserting changeset with empty cast associations" do
     changeset =
       %Permalink{}
       |> Ecto.Changeset.cast(%{url: "root", post: nil}, [:url])
       |> Ecto.Changeset.cast_assoc(:post)
-
     permalink = TestRepo.insert!(changeset)
     assert permalink.post == nil
@@ -704,7 +634,22 @@ defmodule Ecto.Integration.AssocTest do
       %Post{}
       |> Ecto.Changeset.cast(%{title: "root", comments: []}, [:title])
       |> Ecto.Changeset.cast_assoc(:comments)
-
+    post = TestRepo.insert!(changeset)
+    assert post.comments == []
+  end
+
+  test "inserting changeset with empty put associations" do
+    changeset =
+      %Permalink{}
+      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.put_assoc(:post, nil)
+    permalink = TestRepo.insert!(changeset)
+    assert permalink.post == nil
+
+    changeset =
+      %Post{}
+      |> Ecto.Changeset.change()
+      |> Ecto.Changeset.put_assoc(:comments, [])
     post = TestRepo.insert!(changeset)
     assert post.comments == []
   end
@@ -727,8 +672,7 @@ defmodule Ecto.Integration.AssocTest do
     TestRepo.insert!(%Comment{author_id: user.id})
     TestRepo.delete!(user)

-    author_ids =
-      Comment |> TestRepo.all() |> Enum.map(fn comment -> comment.author_id end)
+    author_ids = Comment |> TestRepo.all() |> Enum.map(fn(comment) -> comment.author_id end)

     assert author_ids == [nil, nil]
     refute Process.get(Comment)
@@ -749,24 +693,19 @@ defmodule Ecto.Integration.AssocTest do
     u1 = TestRepo.insert!(%User{name: "john"})
     u2 = TestRepo.insert!(%User{name: "mary"})

-    TestRepo.insert_all("posts_users", [
-      [post_id: p1.id, user_id: u1.id],
-      [post_id: p1.id, user_id: u1.id],
-      [post_id: p2.id, user_id: u2.id]
-    ])
-
+    TestRepo.insert_all "posts_users", [[post_id: p1.id, user_id: u1.id],
+                                        [post_id: p1.id, user_id: u1.id],
+                                        [post_id: p2.id, user_id: u2.id]]
     TestRepo.delete!(p1)

-    [pid2] = TestRepo.all(from(p in Post, select: p.id))
+    [pid2] = TestRepo.all from(p in Post, select: p.id)
     assert pid2 == p2.id

-    [[pid2, uid2]] =
-      TestRepo.all(from(j in "posts_users", select: [j.post_id, j.user_id]))
-
-    assert pid2 == "#{p2.id}"
-    assert uid2 == "#{u2.id}"
+    [[pid2, uid2]] = TestRepo.all from(j in "posts_users", select: [j.post_id, j.user_id])
+    assert pid2 == p2.id
+    assert uid2 == u2.id

-    [uid1, uid2] = TestRepo.all(from(u in User, select: u.id))
+    [uid1, uid2] = TestRepo.all from(u in User, select: u.id)
     assert uid1 == u1.id
     assert uid2 == u2.id
   end
diff --git a/integration/mssql/ecto/cases/interval.exs b/integration/mssql/ecto/cases/interval.exs
new file mode 100644
index 0000000..6c1c359
--- /dev/null
+++ b/integration/mssql/ecto/cases/interval.exs
@@ -0,0 +1,344 @@
+defmodule Ecto.Integration.IntervalTest do
+  use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true)
+
+  alias Ecto.Integration.{Post, User, Usec}
+  alias Ecto.Integration.TestRepo
+  import Ecto.Query
+
+  @posted ~D[2014-01-01]
+  @inserted_at ~N[2014-01-01 02:00:00]
+
+  setup do
+    TestRepo.insert!(%Post{posted: @posted, inserted_at: @inserted_at})
+    :ok
+  end
+
+  test "date_add with year" do
+    dec = Decimal.new(1)
+    assert [~D[2015-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, 1, "year"))
+    assert [~D[2015-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, 1.0, "year"))
+    assert [~D[2015-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^1, "year"))
+    assert [~D[2015-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^1.0, "year"))
+    assert [~D[2015-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^dec, "year"))
+  end
+
+  test "date_add with month" do
+    dec = Decimal.new(3)
+    assert [~D[2014-04-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, 3, "month"))
+    assert [~D[2014-04-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, 3.0, "month"))
+    assert [~D[2014-04-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^3, "month"))
+    assert [~D[2014-04-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^3.0, "month"))
+    assert [~D[2014-04-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^dec, "month"))
+  end
+
+  test "date_add with week" do
+    dec = Decimal.new(3)
+    assert [~D[2014-01-22]] = TestRepo.all(from p in Post, select: date_add(p.posted, 3, "week"))
+    assert [~D[2014-01-22]] = TestRepo.all(from p in Post, select: date_add(p.posted, 3.0, "week"))
+    assert [~D[2014-01-22]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^3, "week"))
+    assert [~D[2014-01-22]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^3.0, "week"))
+    assert [~D[2014-01-22]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^dec, "week"))
+  end
+
day" do + dec = Decimal.new(5) + assert [~D[2014-01-06]] = TestRepo.all(from p in Post, select: date_add(p.posted, 5, "day")) + assert [~D[2014-01-06]] = TestRepo.all(from p in Post, select: date_add(p.posted, 5.0, "day")) + assert [~D[2014-01-06]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^5, "day")) + assert [~D[2014-01-06]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^5.0, "day")) + assert [~D[2014-01-06]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^dec, "day")) + end + + test "date_add with hour" do + dec = Decimal.new(48) + assert [~D[2014-01-03]] = TestRepo.all(from p in Post, select: date_add(p.posted, 48, "hour")) + assert [~D[2014-01-03]] = TestRepo.all(from p in Post, select: date_add(p.posted, 48.0, "hour")) + assert [~D[2014-01-03]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^48, "hour")) + assert [~D[2014-01-03]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^48.0, "hour")) + assert [~D[2014-01-03]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^dec, "hour")) + end + + test "date_add with dynamic" do + posted = @posted + assert [~D[2015-01-01]] = TestRepo.all(from p in Post, select: date_add(^posted, ^1, ^"year")) + assert [~D[2014-04-01]] = TestRepo.all(from p in Post, select: date_add(^posted, ^3, ^"month")) + assert [~D[2014-01-22]] = TestRepo.all(from p in Post, select: date_add(^posted, ^3, ^"week")) + assert [~D[2014-01-06]] = TestRepo.all(from p in Post, select: date_add(^posted, ^5, ^"day")) + assert [~D[2014-01-03]] = TestRepo.all(from p in Post, select: date_add(^posted, ^48, ^"hour")) + end + + test "date_add with negative interval" do + dec = Decimal.new(-1) + assert [~D[2013-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, -1, "year")) + assert [~D[2013-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, -1.0, "year")) + assert [~D[2013-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^-1, "year")) + assert [~D[2013-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^-1.0, "year")) + assert [~D[2013-01-01]] = TestRepo.all(from p in Post, select: date_add(p.posted, ^dec, "year")) + end + + test "datetime_add with year" do + dec = Decimal.new(1) + assert [~N[2015-01-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 1, "year")) + assert [~N[2015-01-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 1.0, "year")) + assert [~N[2015-01-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^1, "year")) + assert [~N[2015-01-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^1.0, "year")) + assert [~N[2015-01-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "year")) + end + + test "datetime_add with month" do + dec = Decimal.new(3) + assert [~N[2014-04-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 3, "month")) + assert [~N[2014-04-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 3.0, "month")) + assert [~N[2014-04-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^3, "month")) + assert [~N[2014-04-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^3.0, "month")) + assert [~N[2014-04-01 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "month")) + end + + test "datetime_add with week" do + dec = 
Decimal.new(3) + assert [~N[2014-01-22 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 3, "week")) + assert [~N[2014-01-22 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 3.0, "week")) + assert [~N[2014-01-22 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^3, "week")) + assert [~N[2014-01-22 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^3.0, "week")) + assert [~N[2014-01-22 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "week")) + end + + test "datetime_add with day" do + dec = Decimal.new(5) + assert [~N[2014-01-06 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 5, "day")) + assert [~N[2014-01-06 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 5.0, "day")) + assert [~N[2014-01-06 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^5, "day")) + assert [~N[2014-01-06 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^5.0, "day")) + assert [~N[2014-01-06 02:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "day")) + end + + test "datetime_add with hour" do + dec = Decimal.new(60) + assert [~N[2014-01-03 14:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 60, "hour")) + assert [~N[2014-01-03 14:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 60.0, "hour")) + assert [~N[2014-01-03 14:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^60, "hour")) + assert [~N[2014-01-03 14:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^60.0, "hour")) + assert [~N[2014-01-03 14:00:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "hour")) + end + + test "datetime_add with minute" do + dec = Decimal.new(90) + assert [~N[2014-01-01 03:30:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 90, "minute")) + assert [~N[2014-01-01 03:30:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 90.0, "minute")) + assert [~N[2014-01-01 03:30:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^90, "minute")) + assert [~N[2014-01-01 03:30:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^90.0, "minute")) + assert [~N[2014-01-01 03:30:00]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "minute")) + end + + test "datetime_add with second" do + dec = Decimal.new(90) + assert [~N[2014-01-01 02:01:30]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 90, "second")) + assert [~N[2014-01-01 02:01:30]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 90.0, "second")) + assert [~N[2014-01-01 02:01:30]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^90, "second")) + assert [~N[2014-01-01 02:01:30]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^90.0, "second")) + assert [~N[2014-01-01 02:01:30]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "second")) + end + + @tag :uses_msec + test "datetime_add with millisecond" do + dec = Decimal.new(1500) + assert [~N[2014-01-01 02:00:01]] = + TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 1500, "millisecond")) + assert [~N[2014-01-01 02:00:01]] = + TestRepo.all(from p in Post, select: 
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 1500.0, "millisecond"))
+    assert [~N[2014-01-01 02:00:01]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^1500, "millisecond"))
+    assert [~N[2014-01-01 02:00:01]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^1500.0, "millisecond"))
+    assert [~N[2014-01-01 02:00:01]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "millisecond"))
+  end
+
+  @tag :uses_usec
+  test "datetime_add with microsecond" do
+    dec = Decimal.new(1500)
+    assert [~N[2014-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 1500, "microsecond"))
+    assert [~N[2014-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, 1500.0, "microsecond"))
+    assert [~N[2014-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^1500, "microsecond"))
+    assert [~N[2014-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^1500.0, "microsecond"))
+    assert [~N[2014-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "microsecond"))
+  end
+
+  test "datetime_add with dynamic" do
+    inserted_at = @inserted_at
+    assert [~N[2015-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(^inserted_at, ^1, ^"year"))
+    assert [~N[2014-04-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(^inserted_at, ^3, ^"month"))
+    assert [~N[2014-01-22 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(^inserted_at, ^3, ^"week"))
+    assert [~N[2014-01-06 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(^inserted_at, ^5, ^"day"))
+    assert [~N[2014-01-03 14:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(^inserted_at, ^60, ^"hour"))
+    assert [~N[2014-01-01 03:30:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(^inserted_at, ^90, ^"minute"))
+    assert [~N[2014-01-01 02:01:30]] =
+             TestRepo.all(from p in Post, select: datetime_add(^inserted_at, ^90, ^"second"))
+  end
+
+  test "datetime_add with dynamic in filters" do
+    inserted_at = @inserted_at
+    field = :inserted_at
+    assert [_] =
+             TestRepo.all(from p in Post, where: p.inserted_at > datetime_add(^inserted_at, ^-1, "year"))
+    assert [_] =
+             TestRepo.all(from p in Post, where: p.inserted_at > datetime_add(^inserted_at, -3, "month"))
+    assert [_] =
+             TestRepo.all(from p in Post, where: field(p, ^field) > datetime_add(^inserted_at, ^-3, ^"week"))
+    assert [_] =
+             TestRepo.all(from p in Post, where: field(p, ^field) > datetime_add(^inserted_at, -5, ^"day"))
+  end
+
+  test "datetime_add with negative interval" do
+    dec = Decimal.new(-1)
+    assert [~N[2013-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, -1, "year"))
+    assert [~N[2013-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, -1.0, "year"))
+    assert [~N[2013-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^-1, "year"))
+    assert [~N[2013-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^-1.0, "year"))
+    assert [~N[2013-01-01 02:00:00]] =
+             TestRepo.all(from p in Post, select: datetime_add(p.inserted_at, ^dec, "year"))
+  end
+
+  test "from_now" do
+    current = DateTime.utc_now.year
+    dec = Decimal.new(5)
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: from_now(5, "year"))
+    assert year > current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: from_now(5.0, "year"))
+    assert year > current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: from_now(^5, "year"))
+    assert year > current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: from_now(^5.0, "year"))
+    assert year > current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: from_now(^dec, "year"))
+    assert year > current
+  end
+
+  test "ago" do
+    current = DateTime.utc_now.year
+    dec = Decimal.new(5)
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: ago(5, "year"))
+    assert year < current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: ago(5.0, "year"))
+    assert year < current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: ago(^5, "year"))
+    assert year < current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: ago(^5.0, "year"))
+    assert year < current
+    assert [%{year: year}] = TestRepo.all(from p in Post, select: ago(^dec, "year"))
+    assert year < current
+  end
+
+  test "datetime_add with utc_datetime" do
+    {:ok, datetime} = DateTime.from_naive(@inserted_at, "Etc/UTC")
+    TestRepo.insert!(%User{inserted_at: datetime})
+
+    {:ok, datetime} = DateTime.from_naive(~N[2015-01-01 02:00:00], "Etc/UTC")
+    dec = Decimal.new(1)
+
+    assert [^datetime] =
+             TestRepo.all(from p in User, select: datetime_add(type(^datetime, :utc_datetime), 0, "year"))
+    assert [^datetime] =
+             TestRepo.all(from p in User, select: datetime_add(p.inserted_at, 1, "year"))
+    assert [^datetime] =
+             TestRepo.all(from p in User, select: datetime_add(p.inserted_at, 1.0, "year"))
+    assert [^datetime] =
+             TestRepo.all(from p in User, select: datetime_add(p.inserted_at, ^1, "year"))
+    assert [^datetime] =
+             TestRepo.all(from p in User, select: datetime_add(p.inserted_at, ^1.0, "year"))
+    assert [^datetime] =
+             TestRepo.all(from p in User, select: datetime_add(p.inserted_at, ^dec, "year"))
+  end
+
+  @tag :uses_usec
+  test "datetime_add with naive_datetime_usec" do
+    TestRepo.insert!(%Usec{naive_datetime_usec: ~N[2014-01-01 02:00:00.000001]})
+    dec = Decimal.new(1500)
+    datetime = ~N[2014-01-01 02:00:00.001501]
+
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(type(^datetime, :naive_datetime_usec), 0, "microsecond"))
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(u.naive_datetime_usec, 1500, "microsecond"))
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(u.naive_datetime_usec, 1500.0, "microsecond"))
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(u.naive_datetime_usec, ^1500, "microsecond"))
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(u.naive_datetime_usec, ^1500.0, "microsecond"))
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(u.naive_datetime_usec, ^dec, "microsecond"))
+  end
+
+  @tag :uses_usec
+  test "datetime_add with utc_datetime_usec" do
+    {:ok, datetime} = DateTime.from_naive(~N[2014-01-01 02:00:00.000001], "Etc/UTC")
+    TestRepo.insert!(%Usec{utc_datetime_usec: datetime})
+
+    {:ok, datetime} = DateTime.from_naive(~N[2014-01-01 02:00:00.001501], "Etc/UTC")
+    dec = Decimal.new(1500)
+
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(type(^datetime, :utc_datetime_usec), 0, "microsecond"))
+    assert [^datetime] =
+             TestRepo.all(from u in Usec, select: datetime_add(u.utc_datetime_usec, 1500, "microsecond"))
+    assert [^datetime] =
"microsecond")) + assert [^datetime] = + TestRepo.all(from u in Usec, select: datetime_add(u.utc_datetime_usec, ^1500, "microsecond")) + assert [^datetime] = + TestRepo.all(from u in Usec, select: datetime_add(u.utc_datetime_usec, ^1500.0, "microsecond")) + assert [^datetime] = + TestRepo.all(from u in Usec, select: datetime_add(u.utc_datetime_usec, ^dec, "microsecond")) + end +end diff --git a/integration/mssql/cases/joins.exs b/integration/mssql/ecto/cases/joins.exs similarity index 52% rename from integration/mssql/cases/joins.exs rename to integration/mssql/ecto/cases/joins.exs index 32028c5..3b5df25 100644 --- a/integration/mssql/cases/joins.exs +++ b/integration/mssql/ecto/cases/joins.exs @@ -1,6 +1,5 @@ defmodule Ecto.Integration.JoinsTest do - use Ecto.Integration.Case, - async: Application.get_env(:ecto, :async_integration_tests, true) + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) alias Ecto.Integration.TestRepo import Ecto.Query @@ -15,30 +14,13 @@ defmodule Ecto.Integration.JoinsTest do test "update all with joins" do user = TestRepo.insert!(%User{name: "Tester"}) post = TestRepo.insert!(%Post{title: "foo"}) - - comment = - TestRepo.insert!(%Comment{ - text: "hey", - author_id: user.id, - post_id: post.id - }) + comment = TestRepo.insert!(%Comment{text: "hey", author_id: user.id, post_id: post.id}) another_post = TestRepo.insert!(%Post{title: "bar"}) + another_comment = TestRepo.insert!(%Comment{text: "another", author_id: user.id, post_id: another_post.id}) - another_comment = - TestRepo.insert!(%Comment{ - text: "another", - author_id: user.id, - post_id: another_post.id - }) - - query = - from( - c in Comment, - join: u in User, - on: u.id == c.author_id, - where: c.post_id in ^[post.id] - ) + query = from(c in Comment, join: u in User, on: u.id == c.author_id, + where: c.post_id in ^[post.id]) assert {1, nil} = TestRepo.update_all(query, set: [text: "hoo"]) assert %Comment{text: "hoo"} = TestRepo.get(Comment, comment.id) @@ -49,32 +31,18 @@ defmodule Ecto.Integration.JoinsTest do test "delete all with joins" do user = TestRepo.insert!(%User{name: "Tester"}) post = TestRepo.insert!(%Post{title: "foo"}) - TestRepo.insert!(%Comment{text: "hey", author_id: user.id, post_id: post.id}) - TestRepo.insert!(%Comment{text: "foo", author_id: user.id, post_id: post.id}) - TestRepo.insert!(%Comment{text: "bar", author_id: user.id}) - query = - from( - c in Comment, - join: u in User, - on: u.id == c.author_id, - where: is_nil(c.post_id) - ) - + query = from(c in Comment, join: u in User, on: u.id == c.author_id, + where: is_nil(c.post_id)) assert {1, nil} = TestRepo.delete_all(query) assert [%Comment{}, %Comment{}] = TestRepo.all(Comment) - query = - from( - c in Comment, - join: u in assoc(c, :author), - join: p in assoc(c, :post), - where: p.id in ^[post.id] - ) - + query = from(c in Comment, join: u in assoc(c, :author), + join: p in assoc(c, :post), + where: p.id in ^[post.id]) assert {2, nil} = TestRepo.delete_all(query) assert [] = TestRepo.all(Comment) end @@ -84,24 +52,10 @@ defmodule Ecto.Integration.JoinsTest do p2 = TestRepo.insert!(%Post{title: "2"}) c1 = TestRepo.insert!(%Permalink{url: "1", post_id: p2.id}) - query = - from( - p in Post, - join: c in assoc(p, :permalink), - order_by: p.id, - select: {p, c} - ) - + query = from(p in Post, join: c in assoc(p, :permalink), order_by: p.id, select: {p, c}) assert [{^p2, ^c1}] = TestRepo.all(query) - query = - from( - p in Post, - join: c in assoc(p, :permalink), - on: c.id == 
-        on: c.id == ^c1.id,
-        select: {p, c}
-      )
-
+    query = from(p in Post, join: c in assoc(p, :permalink), on: c.id == ^c1.id, select: {p, c})
     assert [{^p2, ^c1}] = TestRepo.all(query)
   end
@@ -111,30 +65,58 @@ defmodule Ecto.Integration.JoinsTest do
     p2 = TestRepo.insert!(%Post{title: "2"})
     c1 = TestRepo.insert!(%Permalink{url: "1", post_id: p2.id})

     # Joined query without parameter
-    permalink = from(c in Permalink, where: c.url == "1")
+    permalink = from c in Permalink, where: c.url == "1"
+
+    query = from(p in Post, join: c in ^permalink, on: c.post_id == p.id, select: {p, c})
+    assert [{^p2, ^c1}] = TestRepo.all(query)
+
+    # Joined query with parameter
+    permalink = from c in Permalink, where: c.url == "1"
+
+    query = from(p in Post, join: c in ^permalink, on: c.id == ^c1.id, order_by: p.title, select: {p, c})
+    assert [{^p1, ^c1}, {^p2, ^c1}] = TestRepo.all(query)
+  end
+
+  test "named joins" do
+    _p = TestRepo.insert!(%Post{title: "1"})
+    p2 = TestRepo.insert!(%Post{title: "2"})
+    c1 = TestRepo.insert!(%Permalink{url: "1", post_id: p2.id})

     query =
-      from(
-        p in Post,
-        join: c in ^permalink,
-        on: c.post_id == p.id,
-        select: {p, c}
-      )
+      from(p in Post, join: c in assoc(p, :permalink), as: :permalink, order_by: p.id)
+      |> select([p, permalink: c], {p, c})

     assert [{^p2, ^c1}] = TestRepo.all(query)
+  end
+
+  test "joins with dynamic in :on" do
+    p = TestRepo.insert!(%Post{title: "1"})
+    c = TestRepo.insert!(%Permalink{url: "1", post_id: p.id})
+
+    join_on = dynamic([p, ..., c], c.id == ^c.id)
+
+    query =
+      from(p in Post, join: c in Permalink, on: ^join_on)
+      |> select([p, c], {p, c})
+
+    assert [{^p, ^c}] = TestRepo.all(query)

-    # Joined query witho parameter
-    permalink = from(c in Permalink, where: c.url == "1")
+    join_on = dynamic([p, permalink: c], c.id == ^c.id)

     query =
-      from(
-        p in Post,
-        join: c in ^permalink,
-        on: c.id == ^c1.id,
-        order_by: p.title,
-        select: {p, c}
-      )
+      from(p in Post, join: c in Permalink, as: :permalink, on: ^join_on)
+      |> select([p, c], {p, c})
+
+    assert [{^p, ^c}] = TestRepo.all(query)
+  end
+
+  @tag :cross_join
+  test "cross joins with missing entries" do
+    p1 = TestRepo.insert!(%Post{title: "1"})
+    p2 = TestRepo.insert!(%Post{title: "2"})
+    c1 = TestRepo.insert!(%Permalink{url: "1", post_id: p2.id})

+    query = from(p in Post, cross_join: c in Permalink, order_by: p.id, select: {p, c})
     assert [{^p1, ^c1}, {^p2, ^c1}] = TestRepo.all(query)
   end
@@ -144,14 +126,7 @@ defmodule Ecto.Integration.JoinsTest do
     p2 = TestRepo.insert!(%Post{title: "2"})
     c1 = TestRepo.insert!(%Permalink{url: "1", post_id: p2.id})

-    query =
-      from(
-        p in Post,
-        left_join: c in assoc(p, :permalink),
-        order_by: p.id,
-        select: {p, c}
-      )
-
+    query = from(p in Post, left_join: c in assoc(p, :permalink), order_by: p.id, select: {p, c})
     assert [{^p1, nil}, {^p2, ^c1}] = TestRepo.all(query)
   end
@@ -160,24 +135,18 @@ defmodule Ecto.Integration.JoinsTest do
     %Post{id: pid1} = TestRepo.insert!(%Post{title: "1"})
     %Post{id: pid2} = TestRepo.insert!(%Post{title: "2"})

-    %Permalink{id: plid1} =
-      TestRepo.insert!(%Permalink{url: "1", post_id: pid2})
+    %Permalink{id: plid1} = TestRepo.insert!(%Permalink{url: "1", post_id: pid2})

     TestRepo.insert!(%Comment{text: "1", post_id: pid1})
     TestRepo.insert!(%Comment{text: "2", post_id: pid2})
     TestRepo.insert!(%Comment{text: "3", post_id: nil})

-    query =
-      from(
-        p in Post,
-        right_join: c in assoc(p, :comments),
-        preload: :permalink,
-        order_by: c.id
-      )
-
-    assert [p1, p2, nil] = TestRepo.all(query)
+    query = from(p in Post, right_join: c in assoc(p, :comments),
+                 preload: :permalink, order_by: c.id)
+    assert [p1, p2, p3] = TestRepo.all(query)
     assert p1.id == pid1
     assert p2.id == pid2
+    assert is_nil(p3.id)

     assert p1.permalink == nil
     assert p2.permalink.id == plid1
@@ -190,47 +159,26 @@ defmodule Ecto.Integration.JoinsTest do
     c1 = TestRepo.insert!(%Comment{text: "hey", post_id: post.id})
     c2 = TestRepo.insert!(%Comment{text: "heya", post_id: post.id})

-    query =
-      from(
-        p in Post,
-        join: c in assoc(p, :comments),
-        select: {p, c},
-        order_by: p.id
-      )
-
+    query = from(p in Post, join: c in assoc(p, :comments), select: {p, c}, order_by: p.id)
     [{^post, ^c1}, {^post, ^c2}] = TestRepo.all(query)
   end

   test "has_one association join" do
-    post = TestRepo.insert!(%Post{title: "1", text: "hi"})
-    p1 = TestRepo.insert!(%Permalink{url: "hey", post_id: post.id})
-    p2 = TestRepo.insert!(%Permalink{url: "heya", post_id: post.id})
+    user = TestRepo.insert!(%User{})
+    p1 = TestRepo.insert!(%Permalink{url: "hey", user_id: user.id})
+    p2 = TestRepo.insert!(%Permalink{url: "heya", user_id: user.id})

-    query =
-      from(
-        p in Post,
-        join: c in assoc(p, :permalink),
-        select: {p, c},
-        order_by: c.id
-      )
-
-    [{^post, ^p1}, {^post, ^p2}] = TestRepo.all(query)
+    query = from(p in User, join: c in assoc(p, :permalink), select: {p, c}, order_by: c.id)
+    [{^user, ^p1}, {^user, ^p2}] = TestRepo.all(query)
   end

   test "belongs_to association join" do
-    post = TestRepo.insert!(%Post{title: "1"})
-    p1 = TestRepo.insert!(%Permalink{url: "hey", post_id: post.id})
-    p2 = TestRepo.insert!(%Permalink{url: "heya", post_id: post.id})
+    user = TestRepo.insert!(%User{})
+    p1 = TestRepo.insert!(%Permalink{url: "hey", user_id: user.id})
+    p2 = TestRepo.insert!(%Permalink{url: "heya", user_id: user.id})

-    query =
-      from(
-        p in Permalink,
-        join: c in assoc(p, :post),
-        select: {p, c},
-        order_by: p.id
-      )
-
-    [{^p1, ^post}, {^p2, ^post}] = TestRepo.all(query)
+    query = from(p in Permalink, join: c in assoc(p, :user), select: {p, c}, order_by: p.id)
+    [{^p1, ^user}, {^p2, ^user}] = TestRepo.all(query)
   end

   test "has_many through association join" do
@@ -245,16 +193,54 @@ defmodule Ecto.Integration.JoinsTest do
     %Comment{} = TestRepo.insert!(%Comment{post_id: p1.id, author_id: u2.id})
     %Comment{} = TestRepo.insert!(%Comment{post_id: p2.id, author_id: u2.id})

-    query =
-      from(
-        p in Post,
-        join: a in assoc(p, :comments_authors),
-        select: {p, a},
-        order_by: [p.id, a.name]
-      )
-
-    assert [{^p1, ^u2}, {^p1, ^u1}, {^p1, ^u1}, {^p2, ^u2}] =
-             TestRepo.all(query)
+    query = from p in Post, join: a in assoc(p, :comments_authors), select: {p, a}, order_by: [p.id, a.name]
+    assert [{^p1, ^u2}, {^p1, ^u1}, {^p1, ^u1}, {^p2, ^u2}] = TestRepo.all(query)
+  end
+
+  test "has_many through nested association joins" do
+    u1 = TestRepo.insert!(%User{name: "Alice"})
+    u2 = TestRepo.insert!(%User{name: "John"})
+
+    p1 = TestRepo.insert!(%Post{title: "p1", author_id: u1.id})
+    p2 = TestRepo.insert!(%Post{title: "p2", author_id: u1.id})
+
+    TestRepo.insert!(%Comment{text: "c1", author_id: u1.id, post_id: p1.id})
+    TestRepo.insert!(%Comment{text: "c2", author_id: u2.id, post_id: p1.id})
+    TestRepo.insert!(%Comment{text: "c3", author_id: u2.id, post_id: p2.id})
+    TestRepo.insert!(%Comment{text: "c4", post_id: p2.id})
+    TestRepo.insert!(%Comment{text: "c5", author_id: u1.id, post_id: p2.id})
+
+    assert %{
+             comments: [
+               %{text: "c1"},
+               %{text: "c5"}
+             ],
+             posts: [
+               %{title: "p1"} = p1,
+               %{title: "p2"} = p2
+             ]
+           } =
+             from(u in User)
+             |> join(:left, [u], p in assoc(u, :posts))
+             |> join(:left, [u], c in assoc(u, :comments))
+             |> join(:left, [_, p], c
in assoc(p, :comments)) + |> preload( + [user, posts, comments, post_comments], + comments: comments, + posts: {posts, comments: {post_comments, :author}} + ) + |> TestRepo.get(u1.id) + + assert [ + %{text: "c1", author: %{name: "Alice"}}, + %{text: "c2", author: %{name: "John"}} + ] = p1.comments + + assert [ + %{text: "c3", author: %{name: "John"}}, + %{text: "c4", author: nil}, + %{text: "c5", author: %{name: "Alice"}} + ] = p2.comments end test "many_to_many association join" do @@ -264,20 +250,11 @@ defmodule Ecto.Integration.JoinsTest do u1 = TestRepo.insert!(%User{name: "john"}) u2 = TestRepo.insert!(%User{name: "mary"}) - TestRepo.insert_all("posts_users", [ - [post_id: p1.id, user_id: u1.id], - [post_id: p1.id, user_id: u2.id], - [post_id: p2.id, user_id: u2.id] - ]) - - query = - from( - p in Post, - join: u in assoc(p, :users), - select: {p, u}, - order_by: p.id - ) + TestRepo.insert_all "posts_users", [[post_id: p1.id, user_id: u1.id], + [post_id: p1.id, user_id: u2.id], + [post_id: p2.id, user_id: u2.id]] + query = from(p in Post, join: u in assoc(p, :users), select: {p, u}, order_by: p.id) [{^p1, ^u1}, {^p1, ^u2}, {^p2, ^u2}] = TestRepo.all(query) end @@ -292,22 +269,14 @@ defmodule Ecto.Integration.JoinsTest do c3 = TestRepo.insert!(%Comment{text: "3", post_id: p2.id}) # Without on - query = - from(p in Post, join: c in assoc(p, :comments), preload: [comments: c]) - + query = from(p in Post, join: c in assoc(p, :comments), preload: [comments: c]) [p1, p2] = TestRepo.all(query) assert p1.comments == [c1, c2] assert p2.comments == [c3] # With on - query = - from( - p in Post, - left_join: c in assoc(p, :comments), - on: p.title == c.text, - preload: [comments: c] - ) - + query = from(p in Post, left_join: c in assoc(p, :comments), + on: p.title == c.text, preload: [comments: c]) [p1, p2] = TestRepo.all(query) assert p1.comments == [c1] assert p2.comments == [] @@ -321,13 +290,7 @@ defmodule Ecto.Integration.JoinsTest do _pl = TestRepo.insert!(%Permalink{url: "2"}) pl3 = TestRepo.insert!(%Permalink{url: "3", post_id: p2.id}) - query = - from( - p in Post, - join: pl in assoc(p, :permalink), - preload: [permalink: pl] - ) - + query = from(p in Post, join: pl in assoc(p, :permalink), preload: [permalink: pl]) assert [post1, post3] = TestRepo.all(query) assert post1.permalink == pl1 @@ -342,14 +305,7 @@ defmodule Ecto.Integration.JoinsTest do TestRepo.insert!(%Permalink{url: "2"}) TestRepo.insert!(%Permalink{url: "3", post_id: p2.id}) - query = - from( - pl in Permalink, - left_join: p in assoc(pl, :post), - preload: [post: p], - order_by: pl.id - ) - + query = from(pl in Permalink, left_join: p in assoc(pl, :post), preload: [post: p], order_by: pl.id) assert [pl1, pl2, pl3] = TestRepo.all(query) assert pl1.post == p1 @@ -364,36 +320,20 @@ defmodule Ecto.Integration.JoinsTest do u1 = TestRepo.insert!(%User{name: "1"}) u2 = TestRepo.insert!(%User{name: "2"}) - TestRepo.insert_all("posts_users", [ - [post_id: p1.id, user_id: u1.id], - [post_id: p1.id, user_id: u2.id], - [post_id: p2.id, user_id: u2.id] - ]) + TestRepo.insert_all "posts_users", [[post_id: p1.id, user_id: u1.id], + [post_id: p1.id, user_id: u2.id], + [post_id: p2.id, user_id: u2.id]] # Without on - query = - from( - p in Post, - left_join: u in assoc(p, :users), - preload: [users: u], - order_by: p.id - ) - + query = from(p in Post, left_join: u in assoc(p, :users), preload: [users: u], order_by: p.id) [p1, p2, p3] = TestRepo.all(query) assert p1.users == [u1, u2] assert p2.users == [u2] assert p3.users == [] # With 
on - query = - from( - p in Post, - left_join: u in assoc(p, :users), - on: p.title == u.name, - preload: [users: u], - order_by: p.id - ) - + query = from(p in Post, left_join: u in assoc(p, :users), on: p.title == u.name, + preload: [users: u], order_by: p.id) [p1, p2, p3] = TestRepo.all(query) assert p1.users == [u1] assert p2.users == [u2] @@ -413,26 +353,15 @@ defmodule Ecto.Integration.JoinsTest do TestRepo.insert!(%Comment{post_id: p2.id, author_id: u2.id}) # Without on - query = - from( - p in Post, - left_join: ca in assoc(p, :comments_authors), - preload: [comments_authors: ca] - ) - + query = from(p in Post, left_join: ca in assoc(p, :comments_authors), + preload: [comments_authors: ca]) [p1, p2] = TestRepo.all(query) assert p1.comments_authors == [u1, u2] assert p2.comments_authors == [u2] # With on - query = - from( - p in Post, - left_join: ca in assoc(p, :comments_authors), - on: ca.name == p.title, - preload: [comments_authors: ca] - ) - + query = from(p in Post, left_join: ca in assoc(p, :comments_authors), + on: ca.name == p.title, preload: [comments_authors: ca]) [p1, p2] = TestRepo.all(query) assert p1.comments_authors == [u1] assert p2.comments_authors == [u2] @@ -453,13 +382,8 @@ defmodule Ecto.Integration.JoinsTest do %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid2}) %Comment{} = TestRepo.insert!(%Comment{post_id: pid2, author_id: uid2}) - query = - from( - p in Permalink, - left_join: ca in assoc(p, :post_comments_authors), - preload: [post_comments_authors: ca], - order_by: ca.id - ) + query = from(p in Permalink, left_join: ca in assoc(p, :post_comments_authors), + preload: [post_comments_authors: ca], order_by: ca.id) [l1, l2] = TestRepo.all(query) [u1, u2] = l1.post_comments_authors @@ -470,16 +394,12 @@ defmodule Ecto.Integration.JoinsTest do assert u2.id == uid2 # Insert some intermediary joins to check indexes won't be shuffled - query = - from( - p in Permalink, - left_join: assoc(p, :post), - left_join: ca in assoc(p, :post_comments_authors), - left_join: assoc(p, :post), - left_join: assoc(p, :post), - preload: [post_comments_authors: ca], - order_by: ca.id - ) + query = from(p in Permalink, + left_join: assoc(p, :post), + left_join: ca in assoc(p, :post_comments_authors), + left_join: assoc(p, :post), + left_join: assoc(p, :post), + preload: [post_comments_authors: ca], order_by: ca.id) [l1, l2] = TestRepo.all(query) [u1, u2] = l1.post_comments_authors @@ -499,25 +419,17 @@ defmodule Ecto.Integration.JoinsTest do %User{id: uid1} = TestRepo.insert!(%User{name: "1"}) %User{id: uid2} = TestRepo.insert!(%User{name: "2"}) - %Comment{id: cid1} = - TestRepo.insert!(%Comment{text: "1", post_id: pid1, author_id: uid1}) - - %Comment{id: cid2} = - TestRepo.insert!(%Comment{text: "2", post_id: pid1, author_id: uid2}) - - %Comment{id: cid3} = - TestRepo.insert!(%Comment{text: "3", post_id: pid2, author_id: uid2}) + %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: pid1, author_id: uid1}) + %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: pid1, author_id: uid2}) + %Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: pid2, author_id: uid2}) # use multiple associations to force parallel preloader - query = - from( - p in Post, - left_join: c in assoc(p, :comments), - left_join: u in assoc(c, :author), - order_by: [p.id, c.id, u.id], - preload: [:permalink, comments: {c, author: {u, [:comments, :custom]}}], - select: {0, [p], 1, 2} - ) + query = from p in Post, + left_join: c in assoc(p, :comments), + 
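+             # this second binding (the comment author) is what the nested
+             # preload [comments: {c, author: {u, ...}}] below consumes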
left_join: u in assoc(c, :author), + order_by: [p.id, c.id, u.id], + preload: [:permalink, comments: {c, author: {u, [:comments, :custom]}}], + select: {0, [p], 1, 2} posts = TestRepo.all(query) assert [p1, p2] = Enum.map(posts, fn {0, [p], 1, 2} -> p end) @@ -543,23 +455,15 @@ defmodule Ecto.Integration.JoinsTest do %User{id: uid1} = TestRepo.insert!(%User{name: "1"}) %User{id: uid2} = TestRepo.insert!(%User{name: "2"}) - %Comment{id: cid1} = - TestRepo.insert!(%Comment{text: "1", post_id: pid1, author_id: uid1}) + %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: pid1, author_id: uid1}) + %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: pid1, author_id: nil}) + %Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: pid3, author_id: uid2}) - %Comment{id: cid2} = - TestRepo.insert!(%Comment{text: "2", post_id: pid1, author_id: nil}) - - %Comment{id: cid3} = - TestRepo.insert!(%Comment{text: "3", post_id: pid3, author_id: uid2}) - - query = - from( - p in Post, - left_join: c in assoc(p, :comments), - left_join: u in assoc(c, :author), - order_by: [p.id, c.id, u.id], - preload: [comments: {c, author: u}] - ) + query = from p in Post, + left_join: c in assoc(p, :comments), + left_join: u in assoc(c, :author), + order_by: [p.id, c.id, u.id], + preload: [comments: {c, author: u}] assert [p1, p2, p3] = TestRepo.all(query) assert p1.id == pid1 @@ -585,23 +489,15 @@ defmodule Ecto.Integration.JoinsTest do %User{id: uid1} = TestRepo.insert!(%User{name: "1"}) %User{id: uid2} = TestRepo.insert!(%User{name: "2"}) - %Comment{id: cid1} = - TestRepo.insert!(%Comment{text: "1", post_id: pid1, author_id: uid1}) + %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: pid1, author_id: uid1}) + %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: pid1, author_id: uid2}) + %Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: pid2, author_id: uid2}) - %Comment{id: cid2} = - TestRepo.insert!(%Comment{text: "2", post_id: pid1, author_id: uid2}) - - %Comment{id: cid3} = - TestRepo.insert!(%Comment{text: "3", post_id: pid2, author_id: uid2}) - - query = - from( - p in Post, - left_join: c in assoc(p, :comments), - order_by: [p.id, c.id], - preload: [comments: {c, :author}], - select: p - ) + query = from p in Post, + left_join: c in assoc(p, :comments), + order_by: [p.id, c.id], + preload: [comments: {c, :author}], + select: p assert [p1, p2] = TestRepo.all(query) assert p1.id == pid1 @@ -622,21 +518,17 @@ defmodule Ecto.Integration.JoinsTest do %Post{id: pid1} = TestRepo.insert!(%Post{title: "1"}) %Post{id: pid2} = TestRepo.insert!(%Post{title: "2"}) - %Permalink{id: plid1} = - TestRepo.insert!(%Permalink{url: "1", post_id: pid2}) + %Permalink{id: plid1} = TestRepo.insert!(%Permalink{url: "1", post_id: pid2}) %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: pid1}) %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: pid2}) - %Comment{id: _} = TestRepo.insert!(%Comment{text: "3", post_id: pid2}) + %Comment{id: _} = TestRepo.insert!(%Comment{text: "3", post_id: pid2}) - query = - from( - p in Post, - left_join: c in assoc(p, :comments), - where: c.text in ~w(1 2), - preload: [:permalink, comments: c], - select: {0, [p], 1, 2} - ) + query = from p in Post, + left_join: c in assoc(p, :comments), + where: c.text in ~w(1 2), + preload: [:permalink, comments: c], + select: {0, [p], 1, 2} posts = TestRepo.all(query) assert [p1, p2] = Enum.map(posts, fn {0, [p], 1, 2} -> p end) @@ -651,19 +543,35 @@ 
defmodule Ecto.Integration.JoinsTest do assert c2.id == cid2 end + test "mixing regular join and assoc selector" do + p1 = TestRepo.insert!(%Post{title: "1"}) + p2 = TestRepo.insert!(%Post{title: "2"}) + + c1 = TestRepo.insert!(%Comment{text: "1", post_id: p1.id}) + c2 = TestRepo.insert!(%Comment{text: "2", post_id: p1.id}) + c3 = TestRepo.insert!(%Comment{text: "3", post_id: p2.id}) + + pl1 = TestRepo.insert!(%Permalink{url: "1", post_id: p1.id}) + _pl = TestRepo.insert!(%Permalink{url: "2"}) + pl3 = TestRepo.insert!(%Permalink{url: "3", post_id: p2.id}) + + # Without on + query = from(p in Post, join: pl in assoc(p, :permalink), + join: c in assoc(p, :comments), + preload: [permalink: pl], + select: {p, c}) + [{p1, ^c1}, {p1, ^c2}, {p2, ^c3}] = TestRepo.all(query) + assert p1.permalink == pl1 + assert p2.permalink == pl3 + end + test "association with composite pk join" do post = TestRepo.insert!(%Post{title: "1", text: "hi"}) user = TestRepo.insert!(%User{name: "1"}) TestRepo.insert!(%PostUserCompositePk{post_id: post.id, user_id: user.id}) - query = - from( - p in Post, - join: a in assoc(p, :post_user_composite_pk), - preload: [post_user_composite_pk: a], - select: p - ) - + query = from(p in Post, join: a in assoc(p, :post_user_composite_pk), + preload: [post_user_composite_pk: a], select: p) assert [post] = TestRepo.all(query) assert post.post_user_composite_pk end diff --git a/integration/mssql/cases/preload.exs b/integration/mssql/ecto/cases/preload.exs similarity index 71% rename from integration/mssql/cases/preload.exs rename to integration/mssql/ecto/cases/preload.exs index 44bd689..5dab8bf 100644 --- a/integration/mssql/cases/preload.exs +++ b/integration/mssql/ecto/cases/preload.exs @@ -1,6 +1,5 @@ defmodule Ecto.Integration.PreloadTest do - use Ecto.Integration.Case, - async: Application.get_env(:ecto, :async_integration_tests, true) + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) alias Ecto.Integration.TestRepo import Ecto.Query @@ -38,13 +37,8 @@ defmodule Ecto.Integration.PreloadTest do assert %Ecto.Association.NotLoaded{} = p1.comments [p3, p1, p2] = TestRepo.preload([p3, p1, p2], :comments) - - assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = - p1.comments |> sort_by_id - - assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = - p2.comments |> sort_by_id - + assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments |> sort_by_id + assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments |> sort_by_id assert [] = p3.comments end @@ -53,13 +47,9 @@ defmodule Ecto.Integration.PreloadTest do p2 = TestRepo.insert!(%Post{title: "2"}) p3 = TestRepo.insert!(%Post{title: "3"}) - %Permalink{id: pid1} = - TestRepo.insert!(%Permalink{url: "1", post_id: p1.id}) - - %Permalink{} = TestRepo.insert!(%Permalink{url: "2", post_id: nil}) - - %Permalink{id: pid3} = - TestRepo.insert!(%Permalink{url: "3", post_id: p3.id}) + %Permalink{id: pid1} = TestRepo.insert!(%Permalink{url: "1", post_id: p1.id}) + %Permalink{} = TestRepo.insert!(%Permalink{url: "2", post_id: nil}) + %Permalink{id: pid3} = TestRepo.insert!(%Permalink{url: "3", post_id: p3.id}) assert %Ecto.Association.NotLoaded{} = p1.permalink assert %Ecto.Association.NotLoaded{} = p2.permalink @@ -111,14 +101,12 @@ defmodule Ecto.Integration.PreloadTest do %User{id: uid2} = TestRepo.insert!(%User{name: "2"}) %User{id: uid4} = TestRepo.insert!(%User{name: "3"}) - TestRepo.insert_all("posts_users", [ - [post_id: p1.id, user_id: uid1], - [post_id: p1.id, user_id: uid2], - 
[post_id: p2.id, user_id: uid3], - [post_id: p2.id, user_id: uid4], - [post_id: p3.id, user_id: uid1], - [post_id: p3.id, user_id: uid4] - ]) + TestRepo.insert_all "posts_users", [[post_id: p1.id, user_id: uid1], + [post_id: p1.id, user_id: uid2], + [post_id: p2.id, user_id: uid3], + [post_id: p2.id, user_id: uid4], + [post_id: p3.id, user_id: uid1], + [post_id: p3.id, user_id: uid4]] assert %Ecto.Association.NotLoaded{} = p1.users @@ -240,11 +228,9 @@ defmodule Ecto.Integration.PreloadTest do %User{id: uid1} = TestRepo.insert!(%User{name: "foo"}) %User{id: uid2} = TestRepo.insert!(%User{name: "bar"}) - TestRepo.insert_all("posts_users", [ - [post_id: p1.id, user_id: uid1], - [post_id: p1.id, user_id: uid2], - [post_id: p2.id, user_id: uid2] - ]) + TestRepo.insert_all "posts_users", [[post_id: p1.id, user_id: uid1], + [post_id: p1.id, user_id: uid2], + [post_id: p2.id, user_id: uid2]] %Comment{id: cid1} = TestRepo.insert!(%Comment{author_id: uid1}) %Comment{id: cid2} = TestRepo.insert!(%Comment{author_id: uid1}) @@ -324,17 +310,61 @@ defmodule Ecto.Integration.PreloadTest do %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id}) %Comment{id: cid4} = TestRepo.insert!(%Comment{text: "3", post_id: p2.id}) - assert [pe3, pe1, pe2] = - TestRepo.preload( - [p3, p1, p2], - comments: fn _ -> TestRepo.all(Comment) end - ) - + assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2], + comments: fn _ -> TestRepo.all(Comment) end) assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = pe1.comments assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = pe2.comments assert [] = pe3.comments end + test "preload many_to_many with function" do + p1 = TestRepo.insert!(%Post{title: "1"}) + p2 = TestRepo.insert!(%Post{title: "2"}) + p3 = TestRepo.insert!(%Post{title: "3"}) + + # We use the same name to expose bugs in preload sorting + %User{id: uid1} = TestRepo.insert!(%User{name: "1"}) + %User{id: uid3} = TestRepo.insert!(%User{name: "2"}) + %User{id: uid2} = TestRepo.insert!(%User{name: "2"}) + %User{id: uid4} = TestRepo.insert!(%User{name: "3"}) + + TestRepo.insert_all "posts_users", [[post_id: p1.id, user_id: uid1], + [post_id: p1.id, user_id: uid2], + [post_id: p2.id, user_id: uid3], + [post_id: p2.id, user_id: uid4], + [post_id: p3.id, user_id: uid1], + [post_id: p3.id, user_id: uid4]] + + wrong_preloader = fn post_ids -> + TestRepo.all( + from u in User, + join: pu in "posts_users", + where: pu.post_id in ^post_ids and pu.user_id == u.id, + order_by: u.id, + select: map(u, [:id]) + ) + end + + assert_raise RuntimeError, ~r/invalid custom preload for `users` on `Ecto.Integration.Post`/, fn -> + TestRepo.preload([p1, p2, p3], users: wrong_preloader) + end + + right_preloader = fn post_ids -> + TestRepo.all( + from u in User, + join: pu in "posts_users", + where: pu.post_id in ^post_ids and pu.user_id == u.id, + order_by: u.id, + select: {pu.post_id, map(u, [:id])} + ) + end + + [p1, p2, p3] = TestRepo.preload([p1, p2, p3], users: right_preloader) + assert p1.users == [%{id: uid1}, %{id: uid2}] + assert p2.users == [%{id: uid3}, %{id: uid4}] + assert p3.users == [%{id: uid1}, %{id: uid4}] + end + test "preload with query" do p1 = TestRepo.insert!(%Post{title: "1"}) p2 = TestRepo.insert!(%Post{title: "2"}) @@ -349,45 +379,29 @@ defmodule Ecto.Integration.PreloadTest do assert %Ecto.Association.NotLoaded{} = p1.comments # With empty query - assert [pe3, pe1, pe2] = - TestRepo.preload( - [p3, p1, p2], - comments: from(c in Comment, where: false) - ) - + assert [pe3, pe1, pe2] = 
TestRepo.preload([p3, p1, p2], + comments: from(c in Comment, where: false)) assert [] = pe1.comments assert [] = pe2.comments assert [] = pe3.comments # With custom select - assert [pe3, pe1, pe2] = - TestRepo.preload( - [p3, p1, p2], - comments: from(c in Comment, select: c.id, order_by: c.id) - ) - + assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2], + comments: from(c in Comment, select: c.id)) assert [^cid1, ^cid2] = pe1.comments assert [^cid3, ^cid4] = pe2.comments assert [] = pe3.comments # With custom ordered query - assert [pe3, pe1, pe2] = - TestRepo.preload( - [p3, p1, p2], - comments: from(c in Comment, order_by: [desc: c.text]) - ) - + assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2], + comments: from(c in Comment, order_by: [desc: c.text])) assert [%Comment{id: ^cid2}, %Comment{id: ^cid1}] = pe1.comments assert [%Comment{id: ^cid4}, %Comment{id: ^cid3}] = pe2.comments assert [] = pe3.comments # With custom ordered query with preload - assert [pe3, pe1, pe2] = - TestRepo.preload( - [p3, p1, p2], - comments: {from(c in Comment, order_by: [desc: c.text]), :post} - ) - + assert [pe3, pe1, pe2] = TestRepo.preload([p3, p1, p2], + comments: {from(c in Comment, order_by: [desc: c.text]), :post}) assert [%Comment{id: ^cid2} = c2, %Comment{id: ^cid1} = c1] = pe1.comments assert [%Comment{id: ^cid4} = c4, %Comment{id: ^cid3} = c3] = pe2.comments assert [] = pe3.comments @@ -412,33 +426,17 @@ defmodule Ecto.Integration.PreloadTest do %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: u3.id}) %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: u4.id}) - np1 = - TestRepo.preload( - p1, - comments_authors: from(u in User, where: u.name == "foo") - ) - + np1 = TestRepo.preload(p1, comments_authors: from(u in User, where: u.name == "foo")) assert np1.comments_authors == [u1] - assert_raise ArgumentError, - ~r/Ecto expected a map\/struct with the key `id` but got: \d+/, - fn -> - TestRepo.preload( - p1, - comments_authors: - from(u in User, order_by: u.name, select: u.id) - ) - end + assert_raise ArgumentError, ~r/Ecto expected a map\/struct with the key `id` but got: \d+/, fn -> + TestRepo.preload(p1, comments_authors: from(u in User, order_by: u.name, select: u.id)) + end # The subpreload order does not matter because the result is dictated by comments - np1 = - TestRepo.preload( - p1, - comments_authors: from(u in User, order_by: u.name, select: %{id: u.id}) - ) - + np1 = TestRepo.preload(p1, comments_authors: from(u in User, order_by: u.name, select: %{id: u.id})) assert np1.comments_authors == - [%{id: u1.id}, %{id: u2.id}, %{id: u3.id}, %{id: u4.id}] + [%{id: u1.id}, %{id: u2.id}, %{id: u3.id}, %{id: u4.id}] end ## With take @@ -455,26 +453,14 @@ defmodule Ecto.Integration.PreloadTest do assert %Ecto.Association.NotLoaded{} = p1.comments - posts = - TestRepo.all( - from( - Post, - preload: [:comments], - select: [:id, comments: [:id, :post_id]] - ) - ) - + posts = TestRepo.all(from Post, preload: [:comments], select: [:id, comments: [:id, :post_id]]) [p1, p2, p3] = sort_by_id(posts) assert p1.title == nil assert p2.title == nil assert p3.title == nil - assert [%{id: ^cid1, text: nil}, %{id: ^cid2, text: nil}] = - sort_by_id(p1.comments) - - assert [%{id: ^cid3, text: nil}, %{id: ^cid4, text: nil}] = - sort_by_id(p2.comments) - + assert [%{id: ^cid1, text: nil}, %{id: ^cid2, text: nil}] = sort_by_id(p1.comments) + assert [%{id: ^cid3, text: nil}, %{id: ^cid4, text: nil}] = sort_by_id(p2.comments) assert [] = sort_by_id(p3.comments) end @@ -488,17 
+474,8 @@ defmodule Ecto.Integration.PreloadTest do %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1}) %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid2}) - [p1] = - TestRepo.all( - from( - Post, - preload: [:comments_authors], - select: [:id, comments_authors: :id] - ) - ) - - [%{id: ^uid1, name: nil}, %{id: ^uid2, name: nil}] = - p1.comments_authors |> sort_by_id + [p1] = TestRepo.all from Post, preload: [:comments_authors], select: [:id, comments_authors: :id] + [%{id: ^uid1, name: nil}, %{id: ^uid2, name: nil}] = p1.comments_authors |> sort_by_id end ## Nested @@ -523,7 +500,7 @@ defmodule Ecto.Integration.PreloadTest do TestRepo.insert!(%Comment{text: "3", post_id: p2.id}) TestRepo.insert!(%Comment{text: "4", post_id: p2.id}) - assert [p2, p1] = TestRepo.preload([p2, p1], comments: :post) + assert [p2, p1] = TestRepo.preload([p2, p1], [comments: :post]) assert [c1, c2] = p1.comments assert [c3, c4] = p2.comments assert p1.id == c1.post.id @@ -565,13 +542,7 @@ defmodule Ecto.Integration.PreloadTest do test "preload custom prefix from options" do p = TestRepo.insert!(%Post{title: "1"}) # This preload should fail because it points to a prefix that does not exist - assert catch_error( - TestRepo.preload( - p, - [:comments], - prefix: "this_surely_does_not_exist" - ) - ) + assert catch_error(TestRepo.preload(p, [:comments], prefix: "this_surely_does_not_exist")) end test "preload with binary_id" do @@ -597,11 +568,8 @@ defmodule Ecto.Integration.PreloadTest do test "preload skips already loaded for cardinality one" do %Post{id: pid} = TestRepo.insert!(%Post{title: "1"}) - c1 = - %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: pid}) - - c2 = - %Comment{id: _cid} = TestRepo.insert!(%Comment{text: "2", post_id: nil}) + c1 = %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: pid}) + c2 = %Comment{id: _cid} = TestRepo.insert!(%Comment{text: "2", post_id: nil}) [c1, c2] = TestRepo.preload([c1, c2], :post) assert %Post{id: ^pid} = c1.post @@ -610,21 +578,17 @@ defmodule Ecto.Integration.PreloadTest do [c1, c2] = TestRepo.preload([c1, c2], post: :comments) assert [%Comment{id: ^cid1}] = c1.post.comments - TestRepo.update_all(Post, set: [title: "0"]) - TestRepo.update_all(Comment, set: [post_id: pid]) + TestRepo.update_all Post, set: [title: "0"] + TestRepo.update_all Comment, set: [post_id: pid] # Preloading once again shouldn't change the result [c1, c2] = TestRepo.preload([c1, c2], :post) - assert %Post{id: ^pid, title: "1", comments: [_ | _]} = c1.post + assert %Post{id: ^pid, title: "1", comments: [_|_]} = c1.post assert c2.post == nil [c1, c2] = TestRepo.preload([c1, %{c2 | post_id: pid}], :post, force: true) - - assert %Post{id: ^pid, title: "0", comments: %Ecto.Association.NotLoaded{}} = - c1.post - - assert %Post{id: ^pid, title: "0", comments: %Ecto.Association.NotLoaded{}} = - c2.post + assert %Post{id: ^pid, title: "0", comments: %Ecto.Association.NotLoaded{}} = c1.post + assert %Post{id: ^pid, title: "0", comments: %Ecto.Association.NotLoaded{}} = c2.post end test "preload skips already loaded for cardinality many" do @@ -642,7 +606,7 @@ defmodule Ecto.Integration.PreloadTest do assert hd(p1.comments).post.id == p1.id assert hd(p2.comments).post.id == p2.id - TestRepo.update_all(Comment, set: [text: "0"]) + TestRepo.update_all Comment, set: [text: "0"] # Preloading once again shouldn't change the result [p1, p2] = TestRepo.preload([p1, p2], :comments) @@ -650,12 +614,57 @@ defmodule Ecto.Integration.PreloadTest 
do assert [%Comment{id: ^cid2, text: "2", post: %Post{}}] = p2.comments [p1, p2] = TestRepo.preload([p1, p2], :comments, force: true) + assert [%Comment{id: ^cid1, text: "0", post: %Ecto.Association.NotLoaded{}}] = p1.comments + assert [%Comment{id: ^cid2, text: "0", post: %Ecto.Association.NotLoaded{}}] = p2.comments + end + + test "preload skips already loaded for cardinality many through" do + %Post{id: pid1} = p1 = TestRepo.insert!(%Post{}) + %Post{id: pid2} = p2 = TestRepo.insert!(%Post{}) + + %User{id: uid1} = TestRepo.insert!(%User{name: "foo"}) + %User{id: uid2} = TestRepo.insert!(%User{name: "bar"}) + + %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1}) + %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid1}) + %Comment{} = TestRepo.insert!(%Comment{post_id: pid1, author_id: uid2}) + %Comment{} = TestRepo.insert!(%Comment{post_id: pid2, author_id: uid2}) + + [p1, p2] = TestRepo.preload([p1, p2], :comments_authors) + TestRepo.update_all User, set: [name: "0"] + [p1, p2] = TestRepo.preload([p1, p2], :comments_authors) + + # Through was preloaded + [u1, u2] = p1.comments_authors |> sort_by_id + assert u1.id == uid1 + assert u1.name == "foo" + assert u2.id == uid2 + assert u2.name == "bar" + + [u2] = p2.comments_authors + assert u2.id == uid2 + assert u2.name == "bar" + + # Even directly changing it preserves it + comments_authors = + for user <- p1.comments_authors do + update_in user.name, &String.upcase/1 + end - assert [%Comment{id: ^cid1, text: "0", post: %Ecto.Association.NotLoaded{}}] = - p1.comments + p1 = TestRepo.preload(%{p1 | comments_authors: comments_authors}, :comments_authors) + [u1, u2] = p1.comments_authors |> sort_by_id + assert u1.id == uid1 + assert u1.name == "FOO" + assert u2.id == uid2 + assert u2.name == "BAR" - assert [%Comment{id: ^cid2, text: "0", post: %Ecto.Association.NotLoaded{}}] = - p2.comments + # Unless we force it + p1 = TestRepo.preload(p1, :comments_authors, force: true) + [u1, u2] = p1.comments_authors |> sort_by_id + assert u1.id == uid1 + assert u1.name == "0" + assert u2.id == uid2 + assert u2.name == "0" end test "preload keyword query" do @@ -672,22 +681,12 @@ defmodule Ecto.Integration.PreloadTest do query = from(p in Post, preload: [:comments], select: p) assert [p1, p2, p3] = TestRepo.all(query) |> sort_by_id - - assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = - p1.comments |> sort_by_id - - assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = - p2.comments |> sort_by_id - + assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments |> sort_by_id + assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments |> sort_by_id assert [] = p3.comments # Query with interpolated preload query - query = - from( - p in Post, - preload: [comments: ^from(c in Comment, where: false)], - select: p - ) + query = from(p in Post, preload: [comments: ^from(c in Comment, where: false)], select: p) assert [p1, p2, p3] = TestRepo.all(query) assert [] = p1.comments @@ -701,16 +700,12 @@ defmodule Ecto.Integration.PreloadTest do posts = TestRepo.all(query) [p1, p2, p3] = Enum.map(posts, fn {0, [p], 1, 2} -> p end) |> sort_by_id - assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = - p1.comments |> sort_by_id - - assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = - p2.comments |> sort_by_id - + assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = p1.comments |> sort_by_id + assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = p2.comments |> sort_by_id assert [] = p3.comments end defp sort_by_id(values) do - 
Enum.sort_by(values, & &1.id) + Enum.sort_by(values, &(&1.id)) end end diff --git a/integration/mssql/ecto/cases/repo.exs b/integration/mssql/ecto/cases/repo.exs new file mode 100644 index 0000000..b5939ba --- /dev/null +++ b/integration/mssql/ecto/cases/repo.exs @@ -0,0 +1,1789 @@ +Code.require_file "../support/types.exs", __DIR__ + +defmodule Ecto.Integration.RepoTest do + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) + + alias Ecto.Integration.TestRepo + import Ecto.Query + + alias Ecto.Integration.Post + alias Ecto.Integration.Order + alias Ecto.Integration.User + alias Ecto.Integration.Comment + alias Ecto.Integration.Permalink + alias Ecto.Integration.Custom + alias Ecto.Integration.Barebone + alias Ecto.Integration.CompositePk + alias Ecto.Integration.PostUserCompositePk + + test "returns already started for started repos" do + assert {:error, {:already_started, _}} = TestRepo.start_link + end + + test "supports unnamed repos" do + assert {:ok, pid} = TestRepo.start_link(name: nil) + assert Ecto.Repo.Queryable.all(pid, Post, []) == [] + end + + test "all empty" do + assert TestRepo.all(Post) == [] + assert TestRepo.all(from p in Post) == [] + end + + test "all with in" do + TestRepo.insert!(%Post{title: "hello"}) + + # Works without the query cache. + assert_raise Ecto.Query.CastError, fn -> + TestRepo.all(from p in Post, where: p.title in ^nil) + end + + assert [] = TestRepo.all from p in Post, where: p.title in [] + assert [] = TestRepo.all from p in Post, where: p.title in ["1", "2", "3"] + assert [] = TestRepo.all from p in Post, where: p.title in ^[] + + assert [_] = TestRepo.all from p in Post, where: p.title not in [] + assert [_] = TestRepo.all from p in Post, where: p.title in ["1", "hello", "3"] + assert [_] = TestRepo.all from p in Post, where: p.title in ["1", ^"hello", "3"] + assert [_] = TestRepo.all from p in Post, where: p.title in ^["1", "hello", "3"] + + # Still doesn't work after the query cache. 
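+    # ^nil fails when the parameter is cast, so the cached query plan makes no difference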
+ assert_raise Ecto.Query.CastError, fn -> + TestRepo.all(from p in Post, where: p.title in ^nil) + end + end + + test "all using named from" do + TestRepo.insert!(%Post{title: "hello"}) + + query = + from(p in Post, as: :post) + |> where([post: p], p.title == "hello") + + assert [_] = TestRepo.all query + end + + test "all without schema" do + %Post{} = TestRepo.insert!(%Post{title: "title1"}) + %Post{} = TestRepo.insert!(%Post{title: "title2"}) + + assert ["title1", "title2"] = + TestRepo.all(from(p in "posts", order_by: p.title, select: p.title)) + + assert [_] = + TestRepo.all(from(p in "posts", where: p.title == "title1", select: p.id)) + end + + test "all shares metadata" do + TestRepo.insert!(%Post{title: "title1"}) + TestRepo.insert!(%Post{title: "title2"}) + + [post1, post2] = TestRepo.all(Post) + assert :erts_debug.same(post1.__meta__, post2.__meta__) + + [new_post1, new_post2] = TestRepo.all(Post) + assert :erts_debug.same(post1.__meta__, new_post1.__meta__) + assert :erts_debug.same(post2.__meta__, new_post2.__meta__) + end + + @tag :invalid_prefix + test "all with invalid prefix" do + assert catch_error(TestRepo.all("posts", prefix: "oops")) + end + + test "insert, update and delete" do + post = %Post{title: "insert, update, delete", text: "fetch empty"} + meta = post.__meta__ + + assert %Post{} = inserted = TestRepo.insert!(post) + assert %Post{} = updated = TestRepo.update!(Ecto.Changeset.change(inserted, text: "new")) + + deleted_meta = put_in meta.state, :deleted + assert %Post{__meta__: ^deleted_meta} = TestRepo.delete!(updated) + + loaded_meta = put_in meta.state, :loaded + assert %Post{__meta__: ^loaded_meta} = TestRepo.insert!(post) + + post = TestRepo.one(Post) + assert post.__meta__.state == :loaded + assert post.inserted_at + end + + test "insert, update and delete with field source" do + permalink = %Permalink{url: "url"} + assert %Permalink{url: "url"} = inserted = + TestRepo.insert!(permalink) + assert %Permalink{url: "new"} = updated = + TestRepo.update!(Ecto.Changeset.change(inserted, url: "new")) + assert %Permalink{url: "new"} = + TestRepo.delete!(updated) + end + + @tag :composite_pk + test "insert, update and delete with composite pk" do + c1 = TestRepo.insert!(%CompositePk{a: 1, b: 2, name: "first"}) + c2 = TestRepo.insert!(%CompositePk{a: 1, b: 3, name: "second"}) + + assert CompositePk |> first |> TestRepo.one == c1 + assert CompositePk |> last |> TestRepo.one == c2 + + changeset = Ecto.Changeset.cast(c1, %{name: "first change"}, ~w(name)a) + c1 = TestRepo.update!(changeset) + assert TestRepo.get_by!(CompositePk, %{a: 1, b: 2}) == c1 + + TestRepo.delete!(c2) + assert TestRepo.all(CompositePk) == [c1] + + assert_raise ArgumentError, ~r"to have exactly one primary key", fn -> + TestRepo.get(CompositePk, []) + end + + assert_raise ArgumentError, ~r"to have exactly one primary key", fn -> + TestRepo.get!(CompositePk, [1, 2]) + end + end + + @tag :composite_pk + test "insert, update and delete with associated composite pk" do + user = TestRepo.insert!(%User{}) + post = TestRepo.insert!(%Post{title: "post title", text: "post text"}) + + user_post = TestRepo.insert!(%PostUserCompositePk{user_id: user.id, post_id: post.id}) + assert TestRepo.get_by!(PostUserCompositePk, [user_id: user.id, post_id: post.id]) == user_post + TestRepo.delete!(user_post) + assert TestRepo.all(PostUserCompositePk) == [] + end + + @tag :invalid_prefix + test "insert, update and delete with invalid prefix" do + post = TestRepo.insert!(%Post{}) + changeset = Ecto.Changeset.change(post, 
title: "foo") + assert catch_error(TestRepo.insert(%Post{}, prefix: "oops")) + assert catch_error(TestRepo.update(changeset, prefix: "oops")) + assert catch_error(TestRepo.delete(changeset, prefix: "oops")) + end + + test "insert and update with changeset" do + # On insert we merge the fields and changes + changeset = Ecto.Changeset.cast(%Post{text: "x", title: "wrong"}, + %{"title" => "hello", "temp" => "unknown"}, ~w(title temp)a) + + post = TestRepo.insert!(changeset) + assert %Post{text: "x", title: "hello", temp: "unknown"} = post + assert %Post{text: "x", title: "hello", temp: "temp"} = TestRepo.get!(Post, post.id) + + # On update we merge only fields, direct schema changes are discarded + changeset = Ecto.Changeset.cast(%{post | text: "y"}, + %{"title" => "world", "temp" => "unknown"}, ~w(title temp)a) + + assert %Post{text: "y", title: "world", temp: "unknown"} = TestRepo.update!(changeset) + assert %Post{text: "x", title: "world", temp: "temp"} = TestRepo.get!(Post, post.id) + end + + test "insert and update with empty changeset" do + # On insert we merge the fields and changes + changeset = Ecto.Changeset.cast(%Permalink{}, %{}, ~w()) + assert %Permalink{} = permalink = TestRepo.insert!(changeset) + + # Assert we can update the same value twice, + # without changes, without triggering stale errors. + changeset = Ecto.Changeset.cast(permalink, %{}, ~w()) + assert TestRepo.update!(changeset) == permalink + assert TestRepo.update!(changeset) == permalink + end + + @tag :no_primary_key + test "insert with no primary key" do + assert %Barebone{num: nil} = TestRepo.insert!(%Barebone{}) + assert %Barebone{num: 13} = TestRepo.insert!(%Barebone{num: 13}) + end + + @tag :read_after_writes + test "insert and update with changeset read after writes" do + defmodule RAW do + use Ecto.Schema + + schema "comments" do + field :text, :string + field :lock_version, :integer, read_after_writes: true + end + end + + changeset = Ecto.Changeset.cast(struct(RAW, %{}), %{}, ~w()) + + # If the field is nil, we will not send it + # and read the value back from the database. 
+ assert %{id: cid, lock_version: 1} = raw = TestRepo.insert!(changeset) + + # Set the counter to 11, so we can read it soon + TestRepo.update_all from(u in RAW, where: u.id == ^cid), set: [lock_version: 11] + + # We will read back on update too + changeset = Ecto.Changeset.cast(raw, %{"text" => "0"}, ~w(text)a) + assert %{id: ^cid, lock_version: 11, text: "0"} = TestRepo.update!(changeset) + end + + test "insert autogenerates for custom type" do + post = TestRepo.insert!(%Post{uuid: nil}) + assert byte_size(post.uuid) == 36 + assert TestRepo.get_by(Post, uuid: post.uuid) == post + end + + @tag :id_type + test "insert autogenerates for custom id type" do + defmodule ID do + use Ecto.Schema + + @primary_key {:id, Elixir.Custom.Permalink, autogenerate: true} + schema "posts" do + end + end + + id = TestRepo.insert!(struct(ID, id: nil)) + assert id.id + assert TestRepo.get_by(ID, id: "#{id.id}-hello") == id + end + + @tag :id_type + @tag :assigns_id_type + test "insert with user-assigned primary key" do + assert %Post{id: 1} = TestRepo.insert!(%Post{id: 1}) + end + + @tag :id_type + @tag :assigns_id_type + test "insert and update with user-assigned primary key in changeset" do + changeset = Ecto.Changeset.cast(%Post{id: 11}, %{"id" => "13"}, ~w(id)a) + assert %Post{id: 13} = post = TestRepo.insert!(changeset) + + changeset = Ecto.Changeset.cast(post, %{"id" => "15"}, ~w(id)a) + assert %Post{id: 15} = TestRepo.update!(changeset) + end + + test "insert and fetch a schema with utc timestamps" do + datetime = DateTime.from_unix!(System.system_time(:second), :second) + TestRepo.insert!(%User{inserted_at: datetime}) + assert [%{inserted_at: ^datetime}] = TestRepo.all(User) + end + + test "optimistic locking in update/delete operations" do + import Ecto.Changeset, only: [cast: 3, optimistic_lock: 2] + base_post = TestRepo.insert!(%Comment{}) + + changeset_ok = + base_post + |> cast(%{"text" => "foo.bar"}, ~w(text)a) + |> optimistic_lock(:lock_version) + TestRepo.update!(changeset_ok) + + changeset_stale = optimistic_lock(base_post, :lock_version) + assert_raise Ecto.StaleEntryError, fn -> TestRepo.update!(changeset_stale) end + assert_raise Ecto.StaleEntryError, fn -> TestRepo.delete!(changeset_stale) end + end + + test "optimistic locking in update operation with nil field" do + import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3] + + base_post = + %Comment{} + |> cast(%{lock_version: nil}, [:lock_version]) + |> TestRepo.insert!() + + incrementer = + fn + nil -> 1 + old_value -> old_value + 1 + end + + changeset_ok = + base_post + |> cast(%{"text" => "foo.bar"}, ~w(text)a) + |> optimistic_lock(:lock_version, incrementer) + + updated = TestRepo.update!(changeset_ok) + assert updated.text == "foo.bar" + assert updated.lock_version == 1 + end + + test "optimistic locking in delete operation with nil field" do + import Ecto.Changeset, only: [cast: 3, optimistic_lock: 3] + + base_post = + %Comment{} + |> cast(%{lock_version: nil}, [:lock_version]) + |> TestRepo.insert!() + + incrementer = + fn + nil -> 1 + old_value -> old_value + 1 + end + + changeset_ok = optimistic_lock(base_post, :lock_version, incrementer) + TestRepo.delete!(changeset_ok) + + refute TestRepo.get(Comment, base_post.id) + end + + @tag :unique_constraint + test "unique constraint" do + changeset = Ecto.Changeset.change(%Post{}, uuid: Ecto.UUID.generate()) + {:ok, _} = TestRepo.insert(changeset) + + exception = + assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn -> + changeset + |> 
TestRepo.insert() + end + + assert exception.message =~ "posts_uuid_index (unique_constraint)" + assert exception.message =~ "The changeset has not defined any constraint." + assert exception.message =~ "call `unique_constraint/3`" + + message = ~r/constraint error when attempting to insert struct/ + exception = + assert_raise Ecto.ConstraintError, message, fn -> + changeset + |> Ecto.Changeset.unique_constraint(:uuid, name: :posts_email_changeset) + |> TestRepo.insert() + end + + assert exception.message =~ "posts_email_changeset (unique_constraint)" + + {:error, changeset} = + changeset + |> Ecto.Changeset.unique_constraint(:uuid) + |> TestRepo.insert() + assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}] + assert changeset.data.__meta__.state == :built + end + + @tag :unique_constraint + test "unique constraint from association" do + uuid = Ecto.UUID.generate() + post = & %Post{} |> Ecto.Changeset.change(uuid: &1) |> Ecto.Changeset.unique_constraint(:uuid) + + {:error, changeset} = + TestRepo.insert %User{ + comments: [%Comment{}], + permalink: %Permalink{}, + posts: [post.(uuid), post.(uuid), post.(Ecto.UUID.generate)] + } + + [_, p2, _] = changeset.changes.posts + assert p2.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "posts_uuid_index"]}] + end + + @tag :id_type + @tag :unique_constraint + test "unique constraint with binary_id" do + changeset = Ecto.Changeset.change(%Custom{}, uuid: Ecto.UUID.generate()) + {:ok, _} = TestRepo.insert(changeset) + + {:error, changeset} = + changeset + |> Ecto.Changeset.unique_constraint(:uuid) + |> TestRepo.insert() + assert changeset.errors == [uuid: {"has already been taken", [constraint: :unique, constraint_name: "customs_uuid_index"]}] + assert changeset.data.__meta__.state == :built + end + + test "unique pseudo-constraint violation error message with join table at the repository" do + post = + TestRepo.insert!(%Post{title: "some post"}) + |> TestRepo.preload(:unique_users) + + user = + TestRepo.insert!(%User{name: "some user"}) + + # Violate the unique composite index + {:error, changeset} = + post + |> Ecto.Changeset.change + |> Ecto.Changeset.put_assoc(:unique_users, [user, user]) + |> TestRepo.update + + errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end) + assert errors == %{unique_users: [%{}, %{id: ["has already been taken"]}]} + refute changeset.valid? + end + + @tag :join + @tag :unique_constraint + test "unique constraint violation error message with join table in single changeset" do + post = + TestRepo.insert!(%Post{title: "some post"}) + |> TestRepo.preload(:constraint_users) + + user = + TestRepo.insert!(%User{name: "some user"}) + + # Violate the unique composite index + {:error, changeset} = + post + |> Ecto.Changeset.change + |> Ecto.Changeset.put_assoc(:constraint_users, [user, user]) + |> Ecto.Changeset.unique_constraint(:user, + name: :posts_users_composite_pk_post_id_user_id_index, + message: "has already been assigned") + |> TestRepo.update + + errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end) + assert errors == %{constraint_users: [%{}, %{user: ["has already been assigned"]}]} + + refute changeset.valid? 
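+    # only the duplicated user entry carries an error; the first entry maps to %{}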
+ end + + @tag :join + @tag :unique_constraint + test "unique constraint violation error message with join table and separate changesets" do + post = + TestRepo.insert!(%Post{title: "some post"}) + |> TestRepo.preload(:constraint_users) + + user = TestRepo.insert!(%User{name: "some user"}) + + post + |> Ecto.Changeset.change + |> Ecto.Changeset.put_assoc(:constraint_users, [user]) + |> TestRepo.update + + # Violate the unique composite index + {:error, changeset} = + post + |> Ecto.Changeset.change + |> Ecto.Changeset.put_assoc(:constraint_users, [user]) + |> Ecto.Changeset.unique_constraint(:user, + name: :posts_users_composite_pk_post_id_user_id_index, + message: "has already been assigned") + |> TestRepo.update + + errors = Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end) + assert errors == %{constraint_users: [%{user: ["has already been assigned"]}]} + + refute changeset.valid? + end + + @tag :foreign_key_constraint + test "foreign key constraint" do + changeset = Ecto.Changeset.change(%Comment{post_id: 0}) + + exception = + assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn -> + changeset + |> TestRepo.insert() + end + + assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)" + assert exception.message =~ "The changeset has not defined any constraint." + assert exception.message =~ "call `foreign_key_constraint/3`" + + message = ~r/constraint error when attempting to insert struct/ + exception = + assert_raise Ecto.ConstraintError, message, fn -> + changeset + |> Ecto.Changeset.foreign_key_constraint(:post_id, name: :comments_post_id_other) + |> TestRepo.insert() + end + + assert exception.message =~ "comments_post_id_other (foreign_key_constraint)" + + {:error, changeset} = + changeset + |> Ecto.Changeset.foreign_key_constraint(:post_id) + |> TestRepo.insert() + assert changeset.errors == [post_id: {"does not exist", [constraint: :foreign, constraint_name: "comments_post_id_fkey"]}] + end + + @tag :foreign_key_constraint + test "assoc constraint" do + changeset = Ecto.Changeset.change(%Comment{post_id: 0}) + + exception = + assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert struct/, fn -> + changeset + |> TestRepo.insert() + end + + assert exception.message =~ "comments_post_id_fkey (foreign_key_constraint)" + assert exception.message =~ "The changeset has not defined any constraint." + + message = ~r/constraint error when attempting to insert struct/ + exception = + assert_raise Ecto.ConstraintError, message, fn -> + changeset + |> Ecto.Changeset.assoc_constraint(:post, name: :comments_post_id_other) + |> TestRepo.insert() + end + + assert exception.message =~ "comments_post_id_other (foreign_key_constraint)" + + {:error, changeset} = + changeset + |> Ecto.Changeset.assoc_constraint(:post) + |> TestRepo.insert() + assert changeset.errors == [post: {"does not exist", [constraint: :assoc, constraint_name: "comments_post_id_fkey"]}] + end + + @tag :foreign_key_constraint + test "no assoc constraint error" do + user = TestRepo.insert!(%User{}) + TestRepo.insert!(%Permalink{user_id: user.id}) + + exception = + assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to delete struct/, fn -> + TestRepo.delete!(user) + end + + assert exception.message =~ "permalinks_user_id_fkey (foreign_key_constraint)" + assert exception.message =~ "The changeset has not defined any constraint." 
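+    # deleting directly, without no_assoc_constraint/3, surfaces the FK violation as a raise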
+ end + + @tag :foreign_key_constraint + test "no assoc constraint with changeset mismatch" do + user = TestRepo.insert!(%User{}) + TestRepo.insert!(%Permalink{user_id: user.id}) + + message = ~r/constraint error when attempting to delete struct/ + exception = + assert_raise Ecto.ConstraintError, message, fn -> + user + |> Ecto.Changeset.change + |> Ecto.Changeset.no_assoc_constraint(:permalink, name: :permalinks_user_id_pther) + |> TestRepo.delete() + end + + assert exception.message =~ "permalinks_user_id_pther (foreign_key_constraint)" + end + + @tag :foreign_key_constraint + test "no assoc constraint with changeset match" do + user = TestRepo.insert!(%User{}) + TestRepo.insert!(%Permalink{user_id: user.id}) + + {:error, changeset} = + user + |> Ecto.Changeset.change + |> Ecto.Changeset.no_assoc_constraint(:permalink) + |> TestRepo.delete() + assert changeset.errors == [permalink: {"is still associated with this entry", [constraint: :no_assoc, constraint_name: "permalinks_user_id_fkey"]}] + end + + @tag :foreign_key_constraint + test "insert and update with embeds during failing child foreign key" do + changeset = + Order + |> struct(%{}) + |> order_changeset(%{item: %{price: 10}, permalink: %{post_id: 0}}) + + {:error, changeset} = TestRepo.insert(changeset) + assert %Ecto.Changeset{} = changeset.changes.item + + order = + Order + |> struct(%{}) + |> order_changeset(%{}) + |> TestRepo.insert!() + |> TestRepo.preload([:permalink]) + + changeset = order_changeset(order, %{item: %{price: 10}, permalink: %{post_id: 0}}) + assert %Ecto.Changeset{} = changeset.changes.item + + {:error, changeset} = TestRepo.update(changeset) + assert %Ecto.Changeset{} = changeset.changes.item + end + + def order_changeset(order, params) do + order + |> Ecto.Changeset.cast(params, [:permalink_id]) + |> Ecto.Changeset.cast_embed(:item, with: &item_changeset/2) + |> Ecto.Changeset.cast_assoc(:permalink, with: &permalink_changeset/2) + end + + def item_changeset(item, params) do + item + |> Ecto.Changeset.cast(params, [:price]) + end + + def permalink_changeset(comment, params) do + comment + |> Ecto.Changeset.cast(params, [:post_id]) + |> Ecto.Changeset.assoc_constraint(:post) + end + + test "unsafe_validate_unique/3" do + {:ok, inserted_post} = TestRepo.insert(%Post{title: "Greetings", text: "hi"}) + new_post_changeset = Post.changeset(%Post{}, %{title: "Greetings", text: "ho"}) + + changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title], TestRepo) + assert changeset.errors[:title] == + {"has already been taken", validation: :unsafe_unique, fields: [:title]} + + changeset = Ecto.Changeset.unsafe_validate_unique(new_post_changeset, [:title, :text], TestRepo) + assert changeset.errors[:title] == nil + + update_changeset = Post.changeset(inserted_post, %{text: "ho"}) + changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:title], TestRepo) + assert changeset.errors[:title] == nil # cannot conflict with itself + end + + test "unsafe_validate_unique/3 with composite keys" do + {:ok, inserted_post} = TestRepo.insert(%CompositePk{a: 123, b: 456, name: "UniqueName"}) + + different_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 321}) + changeset = Ecto.Changeset.unsafe_validate_unique(different_pk, [:name], TestRepo) + assert changeset.errors[:name] == + {"has already been taken", validation: :unsafe_unique, fields: [:name]} + + partial_pk = CompositePk.changeset(%CompositePk{}, %{name: "UniqueName", a: 789, b: 456}) + changeset = 
Ecto.Changeset.unsafe_validate_unique(partial_pk, [:name], TestRepo) + assert changeset.errors[:name] == + {"has already been taken", validation: :unsafe_unique, fields: [:name]} + + update_changeset = CompositePk.changeset(inserted_post, %{name: "NewName"}) + changeset = Ecto.Changeset.unsafe_validate_unique(update_changeset, [:name], TestRepo) + assert changeset.valid? + assert changeset.errors[:name] == nil # cannot conflict with itself + end + + test "get(!)" do + post1 = TestRepo.insert!(%Post{title: "1", text: "hai"}) + post2 = TestRepo.insert!(%Post{title: "2", text: "hai"}) + + assert post1 == TestRepo.get(Post, post1.id) + assert post2 == TestRepo.get(Post, to_string post2.id) # With casting + + assert post1 == TestRepo.get!(Post, post1.id) + assert post2 == TestRepo.get!(Post, to_string post2.id) # With casting + + TestRepo.delete!(post1) + + assert nil == TestRepo.get(Post, post1.id) + assert_raise Ecto.NoResultsError, fn -> + TestRepo.get!(Post, post1.id) + end + end + + test "get(!) with custom source" do + custom = Ecto.put_meta(%Custom{}, source: "posts") + custom = TestRepo.insert!(custom) + bid = custom.bid + assert %Custom{bid: ^bid, __meta__: %{source: "posts"}} = + TestRepo.get(from(c in {"posts", Custom}), bid) + end + + test "get(!) with binary_id" do + custom = TestRepo.insert!(%Custom{}) + bid = custom.bid + assert %Custom{bid: ^bid} = TestRepo.get(Custom, bid) + end + + test "get_by(!)" do + post1 = TestRepo.insert!(%Post{title: "1", text: "hai"}) + post2 = TestRepo.insert!(%Post{title: "2", text: "hello"}) + + assert post1 == TestRepo.get_by(Post, id: post1.id) + assert post1 == TestRepo.get_by(Post, text: post1.text) + assert post1 == TestRepo.get_by(Post, id: post1.id, text: post1.text) + assert post2 == TestRepo.get_by(Post, id: to_string(post2.id)) # With casting + assert nil == TestRepo.get_by(Post, text: "hey") + assert nil == TestRepo.get_by(Post, id: post2.id, text: "hey") + + assert post1 == TestRepo.get_by!(Post, id: post1.id) + assert post1 == TestRepo.get_by!(Post, text: post1.text) + assert post1 == TestRepo.get_by!(Post, id: post1.id, text: post1.text) + assert post2 == TestRepo.get_by!(Post, id: to_string(post2.id)) # With casting + + assert post1 == TestRepo.get_by!(Post, %{id: post1.id}) + + assert_raise Ecto.NoResultsError, fn -> + TestRepo.get_by!(Post, id: post2.id, text: "hey") + end + end + + test "first, last and one(!)" do + post1 = TestRepo.insert!(%Post{title: "1", text: "hai"}) + post2 = TestRepo.insert!(%Post{title: "2", text: "hai"}) + + assert post1 == Post |> first |> TestRepo.one + assert post2 == Post |> last |> TestRepo.one + + query = from p in Post, order_by: p.title + assert post1 == query |> first |> TestRepo.one + assert post2 == query |> last |> TestRepo.one + + query = from p in Post, order_by: [desc: p.title], limit: 10 + assert post2 == query |> first |> TestRepo.one + assert post1 == query |> last |> TestRepo.one + + query = from p in Post, where: is_nil(p.id) + refute query |> first |> TestRepo.one + refute query |> last |> TestRepo.one + assert_raise Ecto.NoResultsError, fn -> query |> first |> TestRepo.one! end + assert_raise Ecto.NoResultsError, fn -> query |> last |> TestRepo.one! end + end + + test "exists?" do + TestRepo.insert!(%Post{title: "1", text: "hai", visits: 2}) + TestRepo.insert!(%Post{title: "2", text: "hai", visits: 1}) + + query = from p in Post, where: not is_nil(p.title), limit: 2 + assert query |> TestRepo.exists? 
== true
+
+    query = from p in Post, where: p.title == "1", select: p.title
+    assert query |> TestRepo.exists? == true
+
+    query = from p in Post, where: is_nil(p.id)
+    assert query |> TestRepo.exists? == false
+
+    query = from(p in Post, select: {p.visits, avg(p.visits)}, group_by: p.visits, having: avg(p.visits) > 1)
+    assert query |> TestRepo.exists? == true
+  end
+
+  test "aggregate" do
+    assert TestRepo.aggregate(Post, :max, :visits) == nil
+
+    TestRepo.insert!(%Post{visits: 10})
+    TestRepo.insert!(%Post{visits: 12})
+    TestRepo.insert!(%Post{visits: 14})
+    TestRepo.insert!(%Post{visits: 14})
+
+    # Barebones
+    assert TestRepo.aggregate(Post, :max, :visits) == 14
+    assert TestRepo.aggregate(Post, :min, :visits) == 10
+    assert TestRepo.aggregate(Post, :count, :visits) == 4
+    assert "50" = to_string(TestRepo.aggregate(Post, :sum, :visits))
+    assert "12.5" <> _ = to_string(TestRepo.aggregate(Post, :avg, :visits))
+
+    # With order_by
+    query = from Post, order_by: [asc: :visits]
+    assert TestRepo.aggregate(query, :max, :visits) == 14
+
+    # With order_by and limit
+    query = from Post, order_by: [asc: :visits], limit: 2
+    assert TestRepo.aggregate(query, :max, :visits) == 12
+
+    # With distinct
+    query = from Post, order_by: [asc: :visits], distinct: true
+    assert TestRepo.aggregate(query, :count, :visits) == 3
+  end
+
+  @tag :insert_cell_wise_defaults
+  test "insert all" do
+    assert {2, nil} = TestRepo.insert_all("comments", [[text: "1"], %{text: "2", lock_version: 2}])
+    assert {2, nil} = TestRepo.insert_all({"comments", Comment}, [[text: "3"], %{text: "4", lock_version: 2}])
+    assert [%Comment{text: "1", lock_version: 1},
+            %Comment{text: "2", lock_version: 2},
+            %Comment{text: "3", lock_version: 1},
+            %Comment{text: "4", lock_version: 2}] = TestRepo.all(Comment)
+
+    assert {2, nil} = TestRepo.insert_all(Post, [[], []])
+    assert [%Post{}, %Post{}] = TestRepo.all(Post)
+
+    assert {0, nil} = TestRepo.insert_all("posts", [])
+    assert {0, nil} = TestRepo.insert_all({"posts", Post}, [])
+  end
+
+  @tag :insert_select
+  test "insert all with query" do
+    comment = TestRepo.insert!(%Comment{text: "1", lock_version: 1})
+
+    text_query = from(c in Comment, select: c.text, where: [id: ^comment.id, lock_version: 1])
+
+    lock_version_query = from(c in Comment, select: c.lock_version, where: [id: ^comment.id])
+
+    rows = [
+      [text: "2", lock_version: lock_version_query],
+      [lock_version: lock_version_query, text: "3"],
+      [text: text_query],
+      [text: text_query, lock_version: lock_version_query],
+      [lock_version: 6, text: "6"]
+    ]
+    assert {5, nil} = TestRepo.insert_all(Comment, rows, [])
+
+    inserted_rows = Comment
+                    |> where([c], c.id != ^comment.id)
+                    |> TestRepo.all()
+
+    assert [%Comment{text: "2", lock_version: 1},
+            %Comment{text: "3", lock_version: 1},
+            %Comment{text: "1"},
+            %Comment{text: "1", lock_version: 1},
+            %Comment{text: "6", lock_version: 6}] = inserted_rows
+  end
+
+  @tag :invalid_prefix
+  @tag :insert_cell_wise_defaults
+  test "insert all with invalid prefix" do
+    assert catch_error(TestRepo.insert_all(Post, [[], []], prefix: "oops"))
+  end
+
+  @tag :returning
+  @tag :insert_cell_wise_defaults
+  test "insert all with returning with schema" do
+    assert {0, []} = TestRepo.insert_all(Comment, [], returning: true)
+    assert {0, nil} = TestRepo.insert_all(Comment, [], returning: false)
+
+    {2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "1"], [text: "2"]], returning: [:id, :text])
+    assert 
%Comment{text: "1", __meta__: %{state: :loaded}} = c1 + assert %Comment{text: "2", __meta__: %{state: :loaded}} = c2 + + {2, [c1, c2]} = TestRepo.insert_all(Comment, [[text: "3"], [text: "4"]], returning: true) + assert %Comment{text: "3", __meta__: %{state: :loaded}} = c1 + assert %Comment{text: "4", __meta__: %{state: :loaded}} = c2 + end + + @tag :returning + @tag :insert_cell_wise_defaults + test "insert all with returning with schema with field source" do + assert {0, []} = TestRepo.insert_all(Permalink, [], returning: true) + assert {0, nil} = TestRepo.insert_all(Permalink, [], returning: false) + + {2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "1"], [url: "2"]], returning: [:id, :url]) + assert %Permalink{url: "1", __meta__: %{state: :loaded}} = c1 + assert %Permalink{url: "2", __meta__: %{state: :loaded}} = c2 + + {2, [c1, c2]} = TestRepo.insert_all(Permalink, [[url: "3"], [url: "4"]], returning: true) + assert %Permalink{url: "3", __meta__: %{state: :loaded}} = c1 + assert %Permalink{url: "4", __meta__: %{state: :loaded}} = c2 + end + + @tag :returning + @tag :insert_cell_wise_defaults + test "insert all with returning without schema" do + {2, [c1, c2]} = TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: [:id, :text]) + assert %{id: _, text: "1"} = c1 + assert %{id: _, text: "2"} = c2 + + assert_raise ArgumentError, fn -> + TestRepo.insert_all("comments", [[text: "1"], [text: "2"]], returning: true) + end + end + + @tag :insert_cell_wise_defaults + test "insert all with dumping" do + uuid = Ecto.UUID.generate + assert {1, nil} = TestRepo.insert_all(Post, [%{uuid: uuid}]) + assert [%Post{uuid: ^uuid, title: nil}] = TestRepo.all(Post) + end + + @tag :insert_cell_wise_defaults + test "insert all autogenerates for binary_id type" do + custom = TestRepo.insert!(%Custom{bid: nil}) + assert custom.bid + assert TestRepo.get(Custom, custom.bid) + assert TestRepo.delete!(custom) + refute TestRepo.get(Custom, custom.bid) + + uuid = Ecto.UUID.generate + assert {2, nil} = TestRepo.insert_all(Custom, [%{uuid: uuid}, %{bid: custom.bid}]) + assert [%Custom{bid: bid2, uuid: nil}, + %Custom{bid: bid1, uuid: ^uuid}] = Enum.sort_by(TestRepo.all(Custom), & &1.uuid) + assert bid1 && bid2 + assert custom.bid != bid1 + assert custom.bid == bid2 + end + + test "update all" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) + + assert {3, nil} = TestRepo.update_all(Post, set: [title: "x"]) + + assert %Post{title: "x"} = TestRepo.get(Post, id1) + assert %Post{title: "x"} = TestRepo.get(Post, id2) + assert %Post{title: "x"} = TestRepo.get(Post, id3) + + assert {3, nil} = TestRepo.update_all("posts", [set: [title: nil]]) + + assert %Post{title: nil} = TestRepo.get(Post, id1) + assert %Post{title: nil} = TestRepo.get(Post, id2) + assert %Post{title: nil} = TestRepo.get(Post, id3) + end + + @tag :invalid_prefix + test "update all with invalid prefix" do + assert catch_error(TestRepo.update_all(Post, [set: [title: "x"]], prefix: "oops")) + end + + @tag :returning + test "update all with returning with schema" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) + + assert {3, posts} = TestRepo.update_all(select(Post, [p], p), [set: [title: "x"]]) + + [p1, p2, p3] = Enum.sort_by(posts, & &1.id) + assert %Post{id: 
^id1, title: "x"} = p1 + assert %Post{id: ^id2, title: "x"} = p2 + assert %Post{id: ^id3, title: "x"} = p3 + + assert {3, posts} = TestRepo.update_all(select(Post, [:id, :visits]), [set: [visits: 11]]) + + [p1, p2, p3] = Enum.sort_by(posts, & &1.id) + assert %Post{id: ^id1, title: nil, visits: 11} = p1 + assert %Post{id: ^id2, title: nil, visits: 11} = p2 + assert %Post{id: ^id3, title: nil, visits: 11} = p3 + end + + @tag :returning + test "update all with returning without schema" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) + + assert {3, posts} = TestRepo.update_all(select("posts", [:id, :title]), [set: [title: "x"]]) + + [p1, p2, p3] = Enum.sort_by(posts, & &1.id) + assert p1 == %{id: id1, title: "x"} + assert p2 == %{id: id2, title: "x"} + assert p3 == %{id: id3, title: "x"} + end + + test "update all with filter" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) + + query = from(p in Post, where: p.title == "1" or p.title == "2", + update: [set: [text: ^"y"]]) + assert {2, nil} = TestRepo.update_all(query, set: [title: "x"]) + + assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id1) + assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id2) + assert %Post{title: "3", text: nil} = TestRepo.get(Post, id3) + end + + test "update all no entries" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"}) + + query = from(p in Post, where: p.title == "4") + assert {0, nil} = TestRepo.update_all(query, set: [title: "x"]) + + assert %Post{title: "1"} = TestRepo.get(Post, id1) + assert %Post{title: "2"} = TestRepo.get(Post, id2) + assert %Post{title: "3"} = TestRepo.get(Post, id3) + end + + test "update all increment syntax" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", visits: 0}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", visits: 1}) + + # Positive + query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: 2]] + assert {2, nil} = TestRepo.update_all(query, []) + + assert %Post{visits: 2} = TestRepo.get(Post, id1) + assert %Post{visits: 3} = TestRepo.get(Post, id2) + + # Negative + query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: -1]] + assert {2, nil} = TestRepo.update_all(query, []) + + assert %Post{visits: 1} = TestRepo.get(Post, id1) + assert %Post{visits: 2} = TestRepo.get(Post, id2) + end + + @tag :id_type + test "update all with casting and dumping on id type field" do + assert %Post{id: id1} = TestRepo.insert!(%Post{}) + assert {1, nil} = TestRepo.update_all(Post, set: [counter: to_string(id1)]) + assert %Post{counter: ^id1} = TestRepo.get(Post, id1) + end + + test "update all with casting and dumping" do + text = "hai" + datetime = ~N[2014-01-16 20:26:51] + assert %Post{id: id} = TestRepo.insert!(%Post{}) + + assert {1, nil} = TestRepo.update_all(Post, set: [text: text, inserted_at: datetime]) + assert %Post{text: "hai", inserted_at: ^datetime} = TestRepo.get(Post, id) + end + + test "delete all" do + assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"}) + assert %Post{} = TestRepo.insert!(%Post{title: "3", text: 
"hai"}) + + assert {3, nil} = TestRepo.delete_all(Post) + assert [] = TestRepo.all(Post) + end + + @tag :invalid_prefix + test "delete all with invalid prefix" do + assert catch_error(TestRepo.delete_all(Post, prefix: "oops")) + end + + @tag :returning + test "delete all with returning with schema" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"}) + + assert {3, posts} = TestRepo.delete_all(select(Post, [p], p)) + + [p1, p2, p3] = Enum.sort_by(posts, & &1.id) + assert %Post{id: ^id1, title: "1"} = p1 + assert %Post{id: ^id2, title: "2"} = p2 + assert %Post{id: ^id3, title: "3"} = p3 + end + + @tag :returning + test "delete all with returning without schema" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"}) + + assert {3, posts} = TestRepo.delete_all(select("posts", [:id, :title])) + + [p1, p2, p3] = Enum.sort_by(posts, & &1.id) + assert p1 == %{id: id1, title: "1"} + assert p2 == %{id: id2, title: "2"} + assert p3 == %{id: id3, title: "3"} + end + + test "delete all with filter" do + assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"}) + assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"}) + + query = from(p in Post, where: p.title == "1" or p.title == "2") + assert {2, nil} = TestRepo.delete_all(query) + assert [%Post{}] = TestRepo.all(Post) + end + + test "delete all no entries" do + assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"}) + assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"}) + + query = from(p in Post, where: p.title == "4") + assert {0, nil} = TestRepo.delete_all(query) + assert %Post{title: "1"} = TestRepo.get(Post, id1) + assert %Post{title: "2"} = TestRepo.get(Post, id2) + assert %Post{title: "3"} = TestRepo.get(Post, id3) + end + + test "virtual field" do + assert %Post{id: id} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + assert TestRepo.get(Post, id).temp == "temp" + end + + ## Query syntax + + defmodule Foo do + defstruct [:title] + end + + describe "query select" do + test "expressions" do + %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + + assert [{"1", "hai"}] == + TestRepo.all(from p in Post, select: {p.title, p.text}) + + assert [["1", "hai"]] == + TestRepo.all(from p in Post, select: [p.title, p.text]) + + assert [%{:title => "1", 3 => "hai", "text" => "hai"}] == + TestRepo.all(from p in Post, select: %{ + :title => p.title, + "text" => p.text, + 3 => p.text + }) + + assert [%{:title => "1", "1" => "hai", "text" => "hai"}] == + TestRepo.all(from p in Post, select: %{ + :title => p.title, + p.title => p.text, + "text" => p.text + }) + + assert [%Foo{title: "1"}] == + TestRepo.all(from p in Post, select: %Foo{title: p.title}) + end + + test "map update" do + %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"}) + + assert [%Post{:title => "new title", text: "hai"}] = + TestRepo.all(from p in Post, select: %{p | title: "new title"}) + + assert [%Post{title: "new title", text: "hai"}] = + TestRepo.all(from p in Post, select: %Post{p | title: "new title"}) + + assert_raise KeyError, fn 
-> + TestRepo.all(from p in Post, select: %{p | unknown: "new title"}) + end + + assert_raise BadMapError, fn -> + TestRepo.all(from p in Post, select: %{p.title | title: "new title"}) + end + + assert_raise BadStructError, fn -> + TestRepo.all(from p in Post, select: %Foo{p | title: p.title}) + end + end + + test "take with structs" do + %{id: pid1} = TestRepo.insert!(%Post{title: "1"}) + %{id: pid2} = TestRepo.insert!(%Post{title: "2"}) + %{id: pid3} = TestRepo.insert!(%Post{title: "3"}) + + [p1, p2, p3] = Post |> select([p], struct(p, [:title])) |> order_by([:title]) |> TestRepo.all + refute p1.id + assert p1.title == "1" + assert match?(%Post{}, p1) + refute p2.id + assert p2.title == "2" + assert match?(%Post{}, p2) + refute p3.id + assert p3.title == "3" + assert match?(%Post{}, p3) + + [p1, p2, p3] = Post |> select([:id]) |> order_by([:id]) |> TestRepo.all + assert %Post{id: ^pid1} = p1 + assert %Post{id: ^pid2} = p2 + assert %Post{id: ^pid3} = p3 + end + + test "take with maps" do + %{id: pid1} = TestRepo.insert!(%Post{title: "1"}) + %{id: pid2} = TestRepo.insert!(%Post{title: "2"}) + %{id: pid3} = TestRepo.insert!(%Post{title: "3"}) + + [p1, p2, p3] = "posts" |> select([p], map(p, [:title])) |> order_by([:title]) |> TestRepo.all + assert p1 == %{title: "1"} + assert p2 == %{title: "2"} + assert p3 == %{title: "3"} + + [p1, p2, p3] = "posts" |> select([:id]) |> order_by([:id]) |> TestRepo.all + assert p1 == %{id: pid1} + assert p2 == %{id: pid2} + assert p3 == %{id: pid3} + end + + test "take with preload assocs" do + %{id: pid} = TestRepo.insert!(%Post{title: "post"}) + TestRepo.insert!(%Comment{post_id: pid, text: "comment"}) + fields = [:id, :title, comments: [:text, :post_id]] + + [p] = Post |> preload(:comments) |> select([p], ^fields) |> TestRepo.all + assert %Post{title: "post"} = p + assert [%Comment{text: "comment"}] = p.comments + + [p] = Post |> preload(:comments) |> select([p], struct(p, ^fields)) |> TestRepo.all + assert %Post{title: "post"} = p + assert [%Comment{text: "comment"}] = p.comments + + [p] = Post |> preload(:comments) |> select([p], map(p, ^fields)) |> TestRepo.all + assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid}]} + end + + test "take with nil preload assoc" do + %{id: cid} = TestRepo.insert!(%Comment{text: "comment"}) + fields = [:id, :text, post: [:title]] + + [c] = Comment |> preload(:post) |> select([c], ^fields) |> TestRepo.all + assert %Comment{id: ^cid, text: "comment", post: nil} = c + + [c] = Comment |> preload(:post) |> select([c], struct(c, ^fields)) |> TestRepo.all + assert %Comment{id: ^cid, text: "comment", post: nil} = c + + [c] = Comment |> preload(:post) |> select([c], map(c, ^fields)) |> TestRepo.all + assert c == %{id: cid, text: "comment", post: nil} + end + + test "take with join assocs" do + %{id: pid} = TestRepo.insert!(%Post{title: "post"}) + %{id: cid} = TestRepo.insert!(%Comment{post_id: pid, text: "comment"}) + fields = [:id, :title, comments: [:text, :post_id, :id]] + query = from p in Post, where: p.id == ^pid, join: c in assoc(p, :comments), preload: [comments: c] + + p = TestRepo.one(from q in query, select: ^fields) + assert %Post{title: "post"} = p + assert [%Comment{text: "comment"}] = p.comments + + p = TestRepo.one(from q in query, select: struct(q, ^fields)) + assert %Post{title: "post"} = p + assert [%Comment{text: "comment"}] = p.comments + + p = TestRepo.one(from q in query, select: map(q, ^fields)) + assert p == %{id: pid, title: "post", comments: [%{text: "comment", post_id: pid, 
id: cid}]}
+    end
+
+    test "take with single nil column" do
+      %Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
+      assert %{counter: nil} =
+               TestRepo.one(from p in Post, where: p.title == "1", select: [:counter])
+    end
+
+    test "take with join assocs and single nil column" do
+      %{id: post_id} = TestRepo.insert!(%Post{title: "1", counter: nil})
+      TestRepo.insert!(%Comment{post_id: post_id, text: "comment"})
+      assert %{counter: nil} ==
+               TestRepo.one(from p in Post, join: c in assoc(p, :comments), where: p.title == "1", select: map(p, [:counter]))
+    end
+
+    test "field source" do
+      TestRepo.insert!(%Permalink{url: "url"})
+      assert ["url"] = Permalink |> select([p], p.url) |> TestRepo.all()
+      assert [1] = Permalink |> select([p], count(p.url)) |> TestRepo.all()
+    end
+
+    test "merge" do
+      %Post{} = TestRepo.insert!(%Post{title: "1", counter: nil})
+
+      # Merge on source
+      assert [%Post{title: "2"}] =
+               Post |> select([p], merge(p, %{title: "2"})) |> TestRepo.all()
+      assert [%Post{title: "2"}] =
+               Post |> select([p], p) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
+
+      # Merge on struct
+      assert [%Post{title: "2"}] =
+               Post |> select([p], merge(%Post{title: p.title}, %{title: "2"})) |> TestRepo.all()
+      assert [%Post{title: "2"}] =
+               Post |> select([p], %Post{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
+
+      # Merge on map
+      assert [%{title: "2"}] =
+               Post |> select([p], merge(%{title: p.title}, %{title: "2"})) |> TestRepo.all()
+      assert [%{title: "2"}] =
+               Post |> select([p], %{title: p.title}) |> select_merge([p], %{title: "2"}) |> TestRepo.all()
+    end
+
+    test "merge with update on self" do
+      %Post{} = TestRepo.insert!(%Post{title: "1", counter: 1})
+
+      assert [%Post{title: "1", counter: 2}] =
+               Post |> select([p], merge(p, %{p | counter: 2})) |> TestRepo.all()
+      assert [%Post{title: "1", counter: 2}] =
+               Post |> select([p], p) |> select_merge([p], %{p | counter: 2}) |> TestRepo.all()
+    end
+  end
+
+  test "query count distinct" do
+    TestRepo.insert!(%Post{title: "1"})
+    TestRepo.insert!(%Post{title: "1"})
+    TestRepo.insert!(%Post{title: "2"})
+
+    assert [3] == Post |> select([p], count(p.title)) |> TestRepo.all
+    assert [2] == Post |> select([p], count(p.title, :distinct)) |> TestRepo.all
+  end
+
+  test "query where interpolation" do
+    post1 = TestRepo.insert!(%Post{text: "x", title: "hello"})
+    post2 = TestRepo.insert!(%Post{text: "y", title: "goodbye"})
+
+    assert [post1, post2] == Post |> where([], []) |> TestRepo.all |> Enum.sort_by(& &1.id)
+    assert [post1] == Post |> where([], [title: "hello"]) |> TestRepo.all
+    assert [post1] == Post |> where([], [title: "hello", id: ^post1.id]) |> TestRepo.all
+
+    params0 = []
+    params1 = [title: "hello"]
+    params2 = [title: "hello", id: post1.id]
+    assert [post1, post2] == (from Post, where: ^params0) |> TestRepo.all |> Enum.sort_by(& &1.id)
+    assert [post1] == (from Post, where: ^params1) |> TestRepo.all
+    assert [post1] == (from Post, where: ^params2) |> TestRepo.all
+
+    post3 = TestRepo.insert!(%Post{text: "y", title: "goodbye", uuid: nil})
+    params3 = [title: "goodbye", uuid: post3.uuid]
+    assert [post3] == (from Post, where: ^params3) |> TestRepo.all
+  end
+
+  describe "upsert via insert" do
+    @describetag :upsert
+
+    test "on conflict raise" do
+      {:ok, inserted} = TestRepo.insert(%Post{title: "first"}, on_conflict: :raise)
+      assert catch_error(TestRepo.insert(%Post{id: inserted.id, title: "second"}, on_conflict: :raise))
+    end
+
+    test "on conflict ignore" do
+      post = %Post{title: "first", uuid: 
Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing) + assert inserted.id + assert inserted.__meta__.state == :loaded + + {:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing) + assert not_inserted.id == nil + assert not_inserted.__meta__.state == :loaded + end + + @tag :with_conflict_target + test "on conflict and associations" do + on_conflict = [set: [title: "second"]] + post = %Post{uuid: Ecto.UUID.generate(), + title: "first", comments: [%Comment{}]} + {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid]) + assert inserted.id + end + + @tag :with_conflict_target + test "on conflict with inc" do + uuid = "6fa459ea-ee8a-3ca4-894e-db77e160355e" + post = %Post{title: "first", uuid: uuid} + {:ok, _} = TestRepo.insert(post) + post = %{title: "upsert", uuid: uuid} + TestRepo.insert_all(Post, [post], on_conflict: [inc: [visits: 1]], conflict_target: :uuid) + end + + @tag :with_conflict_target + test "on conflict ignore and conflict target" do + post = %Post{title: "first", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid]) + assert inserted.id + + # Error on non-conflict target + assert catch_error(TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:id])) + + # Error on conflict target + {:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid]) + assert not_inserted.id == nil + end + + @tag :without_conflict_target + test "on conflict keyword list" do + on_conflict = [set: [title: "second"]] + post = %Post{title: "first", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict) + assert inserted.id + + {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict) + assert updated.id == inserted.id + assert updated.title != "second" + assert TestRepo.get!(Post, inserted.id).title == "second" + end + + @tag :with_conflict_target + test "on conflict keyword list and conflict target" do + on_conflict = [set: [title: "second"]] + post = %Post{title: "first", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid]) + assert inserted.id + + # Error on non-conflict target + assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id])) + + {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid]) + assert updated.id == inserted.id + assert updated.title != "second" + assert TestRepo.get!(Post, inserted.id).title == "second" + end + + @tag :returning + @tag :with_conflict_target + test "on conflict keyword list and conflict target and returning" do + {:ok, c1} = TestRepo.insert(%Post{}) + {:ok, c2} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: [:id, :uuid]) + {:ok, c3} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: true) + {:ok, c4} = TestRepo.insert(%Post{id: c1.id}, on_conflict: [set: [id: c1.id]], conflict_target: [:id], returning: false) + + assert c2.uuid == c1.uuid + assert c3.uuid == c1.uuid + assert c4.uuid != c1.uuid + end + + @tag :with_conflict_target + @tag :with_conflict_target_on_constraint + test "on conflict keyword list and conflict target on constraint" do + on_conflict = [set: [title: "new"]] + post = %Post{title: "old"} + {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: 
{:constraint, :posts_pkey}) + assert inserted.id + + {:ok, updated} = TestRepo.insert(%{post | id: inserted.id}, on_conflict: on_conflict, conflict_target: {:constraint, :posts_pkey}) + assert updated.id == inserted.id + assert updated.title != "new" + assert TestRepo.get!(Post, inserted.id).title == "new" + end + + @tag :returning + @tag :with_conflict_target + test "on conflict keyword list and conflict target and returning and field source" do + TestRepo.insert!(%Permalink{url: "old"}) + {:ok, c1} = TestRepo.insert(%Permalink{url: "old"}, + on_conflict: [set: [url: "new1"]], + conflict_target: [:url], + returning: [:url]) + + TestRepo.insert!(%Permalink{url: "old"}) + {:ok, c2} = TestRepo.insert(%Permalink{url: "old"}, + on_conflict: [set: [url: "new2"]], + conflict_target: [:url], + returning: true) + + assert c1.url == "new1" + assert c2.url == "new2" + end + + @tag :returning + @tag :with_conflict_target + test "on conflict ignore and returning" do + post = %Post{title: "first", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid]) + assert inserted.id + + {:ok, not_inserted} = TestRepo.insert(post, on_conflict: :nothing, conflict_target: [:uuid], returning: true) + assert not_inserted.id == nil + end + + @tag :without_conflict_target + test "on conflict query" do + on_conflict = from Post, update: [set: [title: "second"]] + post = %Post{title: "first", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict) + assert inserted.id + + {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict) + assert updated.id == inserted.id + assert updated.title != "second" + assert TestRepo.get!(Post, inserted.id).title == "second" + end + + @tag :with_conflict_target + test "on conflict query and conflict target" do + on_conflict = from Post, update: [set: [title: "second"]] + post = %Post{title: "first", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid]) + assert inserted.id + + # Error on non-conflict target + assert catch_error(TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:id])) + + {:ok, updated} = TestRepo.insert(post, on_conflict: on_conflict, conflict_target: [:uuid]) + assert updated.id == inserted.id + assert updated.title != "second" + assert TestRepo.get!(Post, inserted.id).title == "second" + end + + @tag :with_conflict_target + test "on conflict query having condition" do + post = %Post{title: "first", counter: 1, uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post) + + on_conflict = from Post, where: [counter: 2], update: [set: [title: "second"]] + + insert_options = [ + on_conflict: on_conflict, + conflict_target: [:uuid], + stale_error_field: :counter + ] + + assert {:error, changeset} = TestRepo.insert(post, insert_options) + assert changeset.errors == [counter: {"is stale", [stale: true]}] + + assert TestRepo.get!(Post, inserted.id).title == "first" + end + + @tag :without_conflict_target + test "on conflict replace_all" do + post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all) + assert inserted.id + + post = %Post{title: "updated", text: "updated", uuid: post.uuid} + post = TestRepo.insert!(post, on_conflict: :replace_all) + assert post.id != inserted.id + assert post.title == "updated" + assert post.text == "updated" + + assert TestRepo.all(from p in Post, select: {p.id, 
p.title, p.text}) == + [{post.id, "updated", "updated"}] + assert TestRepo.all(from p in Post, select: count(p.id)) == [1] + end + + @tag :with_conflict_target + test "on conflict replace_all and conflict target" do + post = %Post{title: "first", text: "text", uuid: Ecto.UUID.generate()} + {:ok, inserted} = TestRepo.insert(post, on_conflict: :replace_all, conflict_target: :uuid) + assert inserted.id + + post = %Post{title: "updated", text: "updated", uuid: post.uuid} + post = TestRepo.insert!(post, on_conflict: :replace_all, conflict_target: :uuid) + assert post.id != inserted.id + assert post.title == "updated" + assert post.text == "updated" + + assert TestRepo.all(from p in Post, select: {p.id, p.title, p.text}) == + [{post.id, "updated", "updated"}] + assert TestRepo.all(from p in Post, select: count(p.id)) == [1] + end + end + + describe "upsert via insert_all" do + @describetag :upsert_all + + test "on conflict raise" do + post = [title: "first", uuid: Ecto.UUID.generate()] + {1, nil} = TestRepo.insert_all(Post, [post], on_conflict: :raise) + assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :raise)) + end + + test "on conflict ignore" do + post = [title: "first", uuid: Ecto.UUID.generate()] + assert TestRepo.insert_all(Post, [post], on_conflict: :nothing) == {1, nil} + + # PG returns 0, MySQL returns 1 + {entries, nil} = TestRepo.insert_all(Post, [post], on_conflict: :nothing) + assert entries == 0 or entries == 1 + + assert length(TestRepo.all(Post)) == 1 + end + + @tag :with_conflict_target + test "on conflict ignore and conflict target" do + post = [title: "first", uuid: Ecto.UUID.generate()] + assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) == + {1, nil} + + # Error on non-conflict target + assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:id])) + + # Error on conflict target + assert TestRepo.insert_all(Post, [post], on_conflict: :nothing, conflict_target: [:uuid]) == + {0, nil} + end + + @tag :with_conflict_target + test "on conflict keyword list and conflict target" do + on_conflict = [set: [title: "second"]] + post = [title: "first", uuid: Ecto.UUID.generate()] + {1, nil} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) + + # Error on non-conflict target + assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id])) + + # Error on conflict target + assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) == + {1, nil} + assert TestRepo.all(from p in Post, select: p.title) == ["second"] + end + + @tag :with_conflict_target + @tag :returning + test "on conflict keyword list and conflict target and returning and source field" do + on_conflict = [set: [url: "new"]] + permalink = [url: "old"] + + assert {1, [%Permalink{url: "old"}]} = + TestRepo.insert_all(Permalink, [permalink], + on_conflict: on_conflict, conflict_target: [:url], returning: [:url]) + + assert {1, [%Permalink{url: "new"}]} = + TestRepo.insert_all(Permalink, [permalink], + on_conflict: on_conflict, conflict_target: [:url], returning: [:url]) + end + + @tag :with_conflict_target + test "on conflict query and conflict target" do + on_conflict = from Post, update: [set: [title: "second"]] + post = [title: "first", uuid: Ecto.UUID.generate()] + assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) == + {1, nil} + + # Error on non-conflict target + assert 
catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:id])) + + # Error on conflict target + assert TestRepo.insert_all(Post, [post], on_conflict: on_conflict, conflict_target: [:uuid]) == + {1, nil} + assert TestRepo.all(from p in Post, select: p.title) == ["second"] + end + + @tag :returning + @tag :with_conflict_target + test "on conflict query and conflict target and returning" do + on_conflict = from Post, update: [set: [title: "second"]] + post = [title: "first", uuid: Ecto.UUID.generate()] + {1, [%{id: id}]} = TestRepo.insert_all(Post, [post], on_conflict: on_conflict, + conflict_target: [:uuid], returning: [:id]) + + # Error on non-conflict target + assert catch_error(TestRepo.insert_all(Post, [post], on_conflict: on_conflict, + conflict_target: [:id], returning: [:id])) + + # Error on conflict target + {1, [%Post{id: ^id, title: "second"}]} = + TestRepo.insert_all(Post, [post], on_conflict: on_conflict, + conflict_target: [:uuid], returning: [:id, :title]) + end + + @tag :with_conflict_target + test "source (without an ecto schema) on conflict query and conflict target" do + on_conflict = [set: [title: "second"]] + {:ok, uuid} = Ecto.UUID.dump(Ecto.UUID.generate()) + post = [title: "first", uuid: uuid] + assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) == + {1, nil} + + # Error on non-conflict target + assert catch_error(TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:id])) + + # Error on conflict target + assert TestRepo.insert_all("posts", [post], on_conflict: on_conflict, conflict_target: [:uuid]) == + {1, nil} + assert TestRepo.all(from p in Post, select: p.title) == ["second"] + end + + @tag :without_conflict_target + test "on conflict replace_all" do + post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate} + post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate} + + {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all) + {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all) + + assert post_first.id + assert post_second.id + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + # Multiple record change value: note IDS are also replaced + changes = [%{id: post_first.id + 2, title: "first_updated", + text: "first_updated", uuid: post_first.uuid}, + %{id: post_second.id + 2, title: "second_updated", + text: "second_updated", uuid: post_second.uuid}] + + TestRepo.insert_all(Post, changes, on_conflict: :replace_all) + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + updated_first = TestRepo.get(Post, post_first.id + 2) + assert updated_first.title == "first_updated" + assert updated_first.text == "first_updated" + + updated_second = TestRepo.get(Post, post_second.id + 2) + assert updated_second.title == "second_updated" + assert updated_second.text == "second_updated" + end + + @tag :with_conflict_target + test "on conflict replace_all and conflict_target" do + post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()} + post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()} + + {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all, conflict_target: :uuid) + {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all, conflict_target: :uuid) + + assert post_first.id + assert post_second.id + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + # Multiple 
record change value: note IDS are also replaced + changes = [%{id: post_second.id + 1, title: "first_updated", + text: "first_updated", uuid: post_first.uuid}, + %{id: post_second.id + 2, title: "second_updated", + text: "second_updated", uuid: post_second.uuid}] + + TestRepo.insert_all(Post, changes, on_conflict: :replace_all, conflict_target: :uuid) + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + updated_first = TestRepo.get(Post, post_second.id + 1) + assert updated_first.title == "first_updated" + assert updated_first.text == "first_updated" + + updated_second = TestRepo.get(Post, post_second.id + 2) + assert updated_second.title == "second_updated" + assert updated_second.text == "second_updated" + end + + @tag :without_conflict_target + test "on conflict replace_all_except_primary_key" do + post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate} + post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate} + + {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key) + {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key) + + assert post_first.id + assert post_second.id + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + # Multiple record change value: note IDS are not replaced + changes = [%{id: post_first.id + 2, title: "first_updated", + text: "first_updated", uuid: post_first.uuid}, + %{id: post_second.id + 2, title: "second_updated", + text: "second_updated", uuid: post_second.uuid}] + + TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key) + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + updated_first = TestRepo.get(Post, post_first.id) + assert updated_first.title == "first_updated" + assert updated_first.text == "first_updated" + + updated_second = TestRepo.get(Post, post_second.id) + assert updated_second.title == "second_updated" + assert updated_second.text == "second_updated" + end + + @tag :with_conflict_target + test "on conflict replace_all_except_primary_key and conflict_target" do + post_first = %Post{title: "first", public: true, uuid: Ecto.UUID.generate()} + post_second = %Post{title: "second", public: false, uuid: Ecto.UUID.generate()} + + {:ok, post_first} = TestRepo.insert(post_first, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid) + {:ok, post_second} = TestRepo.insert(post_second, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid) + + assert post_first.id + assert post_second.id + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + # Multiple record change value: note IDS are not replaced + changes = [%{id: post_first.id + 2, title: "first_updated", + text: "first_updated", uuid: post_first.uuid}, + %{id: post_second.id + 2, title: "second_updated", + text: "second_updated", uuid: post_second.uuid}] + + TestRepo.insert_all(Post, changes, on_conflict: :replace_all_except_primary_key, conflict_target: :uuid) + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + updated_first = TestRepo.get(Post, post_first.id) + assert updated_first.title == "first_updated" + assert updated_first.text == "first_updated" + + updated_second = TestRepo.get(Post, post_second.id) + assert updated_second.title == "second_updated" + assert updated_second.text == "second_updated" + end + + @tag :with_conflict_target + test "on conflict replace and conflict_target" do + post_first = %Post{title: "first", visits: 10, 
public: true, uuid: Ecto.UUID.generate} + post_second = %Post{title: "second", visits: 20, public: false, uuid: Ecto.UUID.generate} + + {:ok, post_first} = TestRepo.insert(post_first, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid) + {:ok, post_second} = TestRepo.insert(post_second, on_conflict: {:replace, [:title, :visits]}, conflict_target: :uuid) + + assert post_first.id + assert post_second.id + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + # Multiple record change value: note `public` field is not changed + changes = [%{id: post_first.id, title: "first_updated", visits: 11, public: false, text: "first_updated", uuid: post_first.uuid}, + %{id: post_second.id, title: "second_updated", visits: 21, public: true, text: "second_updated", uuid: post_second.uuid}] + + TestRepo.insert_all(Post, changes, on_conflict: {:replace, [:title, :visits, :text]}, conflict_target: :uuid) + assert TestRepo.all(from p in Post, select: count(p.id)) == [2] + + updated_first = TestRepo.get(Post, post_first.id) + assert updated_first.title == "first_updated" + assert updated_first.visits == 11 + assert updated_first.public == true + assert updated_first.text == "first_updated" + + updated_second = TestRepo.get(Post, post_second.id) + assert updated_second.title == "second_updated" + assert updated_second.visits == 21 + assert updated_second.public == false + assert updated_second.text == "second_updated" + end + end +end diff --git a/integration/mssql/ecto/cases/type.exs b/integration/mssql/ecto/cases/type.exs new file mode 100644 index 0000000..6d46619 --- /dev/null +++ b/integration/mssql/ecto/cases/type.exs @@ -0,0 +1,388 @@ +Code.require_file "../support/types.exs", __DIR__ + +defmodule Ecto.Integration.TypeTest do + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) + + alias Ecto.Integration.{Custom, Item, ItemColor, Order, Post, User, Tag, Usec} + alias Ecto.Integration.TestRepo + import Ecto.Query + + test "primitive types" do + integer = 1 + float = 0.1 + text = <<0, 1>> + uuid = "00010203-0405-4607-8809-0a0b0c0d0e0f" + datetime = ~N[2014-01-16 20:26:51] + + TestRepo.insert!(%Post{text: text, public: true, visits: integer, uuid: uuid, + counter: integer, inserted_at: datetime, intensity: float}) + + # nil + assert [nil] = TestRepo.all(from Post, select: nil) + + # ID + assert [1] = TestRepo.all(from p in Post, where: p.counter == ^integer, select: p.counter) + + # Integers + assert [1] = TestRepo.all(from p in Post, where: p.visits == ^integer, select: p.visits) + assert [1] = TestRepo.all(from p in Post, where: p.visits == 1, select: p.visits) + assert [3] = TestRepo.all(from p in Post, select: p.visits + 2) + + # Floats + assert [0.1] = TestRepo.all(from p in Post, where: p.intensity == ^float, select: p.intensity) + assert [0.1] = TestRepo.all(from p in Post, where: p.intensity == 0.1, select: p.intensity) + assert [1500.0] = TestRepo.all(from p in Post, select: 1500.0) + assert [0.5] = TestRepo.all(from p in Post, select: p.intensity * 5) + + # Booleans + assert [true] = TestRepo.all(from p in Post, where: p.public == ^true, select: p.public) + assert [true] = TestRepo.all(from p in Post, where: p.public == true, select: p.public) + assert [false] = TestRepo.all(from p in Post, where: p.public == true, select: not p.public) + assert [true] = TestRepo.all(from p in Post, where: p.public == true, select: not not p.public) + + # Binaries + assert [^text] = TestRepo.all(from p in Post, where: p.text == <<0, 1>>, 
select: p.text) + assert [^text] = TestRepo.all(from p in Post, where: p.text == ^text, select: p.text) + + # UUID + assert [^uuid] = TestRepo.all(from p in Post, where: p.uuid == ^uuid, select: p.uuid) + + # NaiveDatetime + assert [^datetime] = TestRepo.all(from p in Post, where: p.inserted_at == ^datetime, select: p.inserted_at) + + # Datetime + datetime = DateTime.from_unix!(System.system_time(:second), :second) + TestRepo.insert!(%User{inserted_at: datetime}) + assert [^datetime] = TestRepo.all(from u in User, where: u.inserted_at == ^datetime, select: u.inserted_at) + + # usec + naive_datetime = ~N[2014-01-16 20:26:51.000000] + datetime = DateTime.from_naive!(~N[2014-01-16 20:26:51.000000], "Etc/UTC") + TestRepo.insert!(%Usec{naive_datetime_usec: naive_datetime, utc_datetime_usec: datetime}) + assert [^naive_datetime] = TestRepo.all(from u in Usec, where: u.naive_datetime_usec == ^naive_datetime, select: u.naive_datetime_usec) + assert [^datetime] = TestRepo.all(from u in Usec, where: u.utc_datetime_usec == ^datetime, select: u.utc_datetime_usec) + + naive_datetime = ~N[2014-01-16 20:26:51.123000] + datetime = DateTime.from_naive!(~N[2014-01-16 20:26:51.123000], "Etc/UTC") + TestRepo.insert!(%Usec{naive_datetime_usec: naive_datetime, utc_datetime_usec: datetime}) + assert [^naive_datetime] = TestRepo.all(from u in Usec, where: u.naive_datetime_usec == ^naive_datetime, select: u.naive_datetime_usec) + assert [^datetime] = TestRepo.all(from u in Usec, where: u.utc_datetime_usec == ^datetime, select: u.utc_datetime_usec) + end + + test "aggregate types" do + datetime = ~N[2014-01-16 20:26:51] + TestRepo.insert!(%Post{inserted_at: datetime}) + query = from p in Post, select: max(p.inserted_at) + assert [^datetime] = TestRepo.all(query) + end + + @tag :aggregate_filters + test "aggregate filter types" do + datetime = ~N[2014-01-16 20:26:51] + TestRepo.insert!(%Post{inserted_at: datetime}) + query = from p in Post, select: filter(max(p.inserted_at), p.public == ^true) + assert [^datetime] = TestRepo.all(query) + end + + test "coalesce type when default" do + TestRepo.insert!(%Post{text: nil}) + text = <<0, 1>> + query = from p in Post, select: coalesce(p.text, ^text) + assert [^text] = TestRepo.all(query) + end + + test "coalesce type when value" do + text = <<0, 2>> + default_text = <<0, 1>> + TestRepo.insert!(%Post{text: text}) + query = from p in Post, select: coalesce(p.text, ^default_text) + assert [^text] = TestRepo.all(query) + end + + test "tagged types" do + TestRepo.insert!(%Post{}) + + # Numbers + assert [1] = TestRepo.all(from Post, select: type(^"1", :integer)) + assert [1.0] = TestRepo.all(from Post, select: type(^1.0, :float)) + assert [1] = TestRepo.all(from p in Post, select: type(^"1", p.visits)) + assert [1.0] = TestRepo.all(from p in Post, select: type(^"1", p.intensity)) + + # Custom wrappers + assert [1] = TestRepo.all(from Post, select: type(^"1", Elixir.Custom.Permalink)) + + # Custom types + uuid = Ecto.UUID.generate() + assert [^uuid] = TestRepo.all(from Post, select: type(^uuid, Ecto.UUID)) + + # Math operations + assert [4] = TestRepo.all(from Post, select: type(2 + ^"2", :integer)) + assert [4.0] = TestRepo.all(from Post, select: type(2.0 + ^"2", :float)) + assert [4] = TestRepo.all(from p in Post, select: type(2 + ^"2", p.visits)) + assert [4.0] = TestRepo.all(from p in Post, select: type(2.0 + ^"2", p.intensity)) + end + + test "binary id type" do + assert %Custom{} = custom = TestRepo.insert!(%Custom{}) + bid = custom.bid + assert [^bid] = TestRepo.all(from c 
in Custom, select: c.bid) + assert [^bid] = TestRepo.all(from c in Custom, select: type(^bid, :binary_id)) + end + + test "text type" do + assert %Order{} = order = TestRepo.insert!(%Order{instructions: "hello"}) + id = order.id + assert order.instructions == "hello" + assert [^id] = TestRepo.all(from o in Order, where: o.instructions == ^"hello", select: o.id) + end + + @tag :array_type + test "array type" do + ints = [1, 2, 3] + tag = TestRepo.insert!(%Tag{ints: ints}) + + assert TestRepo.all(from t in Tag, where: t.ints == ^[], select: t.ints) == [] + assert TestRepo.all(from t in Tag, where: t.ints == ^[1, 2, 3], select: t.ints) == [ints] + + # Both sides interpolation + assert TestRepo.all(from t in Tag, where: ^"b" in ^["a", "b", "c"], select: t.ints) == [ints] + assert TestRepo.all(from t in Tag, where: ^"b" in [^"a", ^"b", ^"c"], select: t.ints) == [ints] + + # Querying + assert TestRepo.all(from t in Tag, where: t.ints == [1, 2, 3], select: t.ints) == [ints] + assert TestRepo.all(from t in Tag, where: 0 in t.ints, select: t.ints) == [] + assert TestRepo.all(from t in Tag, where: 1 in t.ints, select: t.ints) == [ints] + + # Update + tag = TestRepo.update!(Ecto.Changeset.change tag, ints: nil) + assert TestRepo.get!(Tag, tag.id).ints == nil + + tag = TestRepo.update!(Ecto.Changeset.change tag, ints: [3, 2, 1]) + assert TestRepo.get!(Tag, tag.id).ints == [3, 2, 1] + + # Update all + {1, _} = TestRepo.update_all(Tag, push: [ints: 0]) + assert TestRepo.get!(Tag, tag.id).ints == [3, 2, 1, 0] + + {1, _} = TestRepo.update_all(Tag, pull: [ints: 2]) + assert TestRepo.get!(Tag, tag.id).ints == [3, 1, 0] + + {1, _} = TestRepo.update_all(Tag, set: [ints: nil]) + assert TestRepo.get!(Tag, tag.id).ints == nil + end + + @tag :array_type + test "array type with custom types" do + uuids = ["51fcfbdd-ad60-4ccb-8bf9-47aabd66d075"] + TestRepo.insert!(%Tag{uuids: ["51fcfbdd-ad60-4ccb-8bf9-47aabd66d075"]}) + + assert TestRepo.all(from t in Tag, where: t.uuids == ^[], select: t.uuids) == [] + assert TestRepo.all(from t in Tag, where: t.uuids == ^["51fcfbdd-ad60-4ccb-8bf9-47aabd66d075"], + select: t.uuids) == [uuids] + + {1, _} = TestRepo.update_all(Tag, set: [uuids: nil]) + assert TestRepo.all(from t in Tag, select: t.uuids) == [nil] + end + + @tag :array_type + test "array type with nil in array" do + tag = TestRepo.insert!(%Tag{ints: [1, nil, 3]}) + assert tag.ints == [1, nil, 3] + end + + @tag :map_type + test "untyped map" do + post1 = TestRepo.insert!(%Post{meta: %{"foo" => "bar", "baz" => "bat"}}) + post2 = TestRepo.insert!(%Post{meta: %{foo: "bar", baz: "bat"}}) + + assert TestRepo.all(from p in Post, where: p.id == ^post1.id, select: p.meta) == + [%{"foo" => "bar", "baz" => "bat"}] + assert TestRepo.all(from p in Post, where: p.id == ^post2.id, select: p.meta) == + [%{"foo" => "bar", "baz" => "bat"}] + end + + @tag :map_type + test "typed string map" do + post1 = TestRepo.insert!(%Post{links: %{"foo" => "http://foo.com", "bar" => "http://bar.com"}}) + post2 = TestRepo.insert!(%Post{links: %{foo: "http://foo.com", bar: "http://bar.com"}}) + + assert TestRepo.all(from p in Post, where: p.id == ^post1.id, select: p.links) == + [%{"foo" => "http://foo.com", "bar" => "http://bar.com"}] + assert TestRepo.all(from p in Post, where: p.id == ^post2.id, select: p.links) == + [%{"foo" => "http://foo.com", "bar" => "http://bar.com"}] + end + + @tag :map_type + test "typed float map" do + post = TestRepo.insert!(%Post{intensities: %{"foo" => 1.0, "bar" => 416500.0}}) + + # Note we are using === since we want 
to check integer vs float + assert TestRepo.all(from p in Post, where: p.id == ^post.id, select: p.intensities) === + [%{"foo" => 1.0, "bar" => 416500.0}] + end + + @tag :map_type + test "map type on update" do + post = TestRepo.insert!(%Post{meta: %{"world" => "hello"}}) + assert TestRepo.get!(Post, post.id).meta == %{"world" => "hello"} + + post = TestRepo.update!(Ecto.Changeset.change post, meta: %{hello: "world"}) + assert TestRepo.get!(Post, post.id).meta == %{"hello" => "world"} + + query = from(p in Post, where: p.id == ^post.id) + TestRepo.update_all(query, set: [meta: %{world: "hello"}]) + assert TestRepo.get!(Post, post.id).meta == %{"world" => "hello"} + end + + @tag :map_type + test "embeds one" do + item = %Item{price: 123, valid_at: ~D[2014-01-16]} + + order = + %Order{} + |> Ecto.Changeset.change + |> Ecto.Changeset.put_embed(:item, item) + |> TestRepo.insert!() + + dbitem = TestRepo.get!(Order, order.id).item + assert item.reference == dbitem.reference + assert item.price == dbitem.price + assert item.valid_at == dbitem.valid_at + assert dbitem.id + + [dbitem] = TestRepo.all(from o in Order, select: o.item) + assert item.reference == dbitem.reference + assert item.price == dbitem.price + assert item.valid_at == dbitem.valid_at + assert dbitem.id + + {1, _} = TestRepo.update_all(Order, set: [item: %{dbitem | price: 456}]) + assert TestRepo.get!(Order, order.id).item.price == 456 + end + + @tag :map_type + test "embeds one with custom type" do + item = %Item{price: 123, reference: "PREFIX-EXAMPLE"} + + order = + %Order{} + |> Ecto.Changeset.change + |> Ecto.Changeset.put_embed(:item, item) + |> TestRepo.insert!() + + dbitem = TestRepo.get!(Order, order.id).item + assert dbitem.reference == "PREFIX-EXAMPLE" + assert [%{"reference" => "EXAMPLE"}] = TestRepo.all(from o in "orders", select: o.item) + end + + @tag :map_type + test "empty embeds one" do + order = TestRepo.insert!(%Order{}) + assert order.item == nil + assert TestRepo.get!(Order, order.id).item == nil + end + + @tag :map_type + @tag :array_type + test "embeds many" do + item = %Item{price: 123, valid_at: ~D[2014-01-16]} + tag = + %Tag{} + |> Ecto.Changeset.change + |> Ecto.Changeset.put_embed(:items, [item]) + tag = TestRepo.insert!(tag) + + [dbitem] = TestRepo.get!(Tag, tag.id).items + assert item.price == dbitem.price + assert item.valid_at == dbitem.valid_at + assert dbitem.id + + [[dbitem]] = TestRepo.all(from t in Tag, select: t.items) + assert item.price == dbitem.price + assert item.valid_at == dbitem.valid_at + assert dbitem.id + + {1, _} = TestRepo.update_all(Tag, set: [items: [%{dbitem | price: 456}]]) + assert (TestRepo.get!(Tag, tag.id).items |> hd).price == 456 + end + + @tag :map_type + @tag :array_type + test "empty embeds many" do + tag = TestRepo.insert!(%Tag{}) + assert tag.items == [] + assert TestRepo.get!(Tag, tag.id).items == [] + end + + @tag :map_type + @tag :array_type + test "nested embeds" do + red = %ItemColor{name: "red"} + blue = %ItemColor{name: "blue"} + item = %Item{ + primary_color: red, + secondary_colors: [blue] + } + + order = + %Order{} + |> Ecto.Changeset.change + |> Ecto.Changeset.put_embed(:item, item) + order = TestRepo.insert!(order) + dbitem = TestRepo.get!(Order, order.id).item + assert item.primary_color == dbitem.primary_color + assert item.secondary_colors == dbitem.secondary_colors + assert dbitem.id + + [dbitem] = TestRepo.all(from o in Order, select: o.item) + assert item.primary_color == dbitem.primary_color + assert item.secondary_colors == dbitem.secondary_colors 
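+    # The embed id is autogenerated during insert, so the copy loaded
+    # back through the query should also carry a non-nil id.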
+ assert dbitem.id + end + + @tag :decimal_type + test "decimal type" do + decimal = Decimal.new("1.0") + TestRepo.insert!(%Post{cost: decimal}) + + assert [^decimal] = TestRepo.all(from p in Post, where: p.cost == ^decimal, select: p.cost) + assert [^decimal] = TestRepo.all(from p in Post, where: p.cost == ^1.0, select: p.cost) + assert [^decimal] = TestRepo.all(from p in Post, where: p.cost == ^1, select: p.cost) + assert [^decimal] = TestRepo.all(from p in Post, where: p.cost == 1.0, select: p.cost) + assert [^decimal] = TestRepo.all(from p in Post, where: p.cost == 1, select: p.cost) + + assert TestRepo.all(from p in Post, select: p.cost * 2) == [Decimal.new("2.0")] + assert TestRepo.all(from p in Post, select: p.cost - p.cost) == [Decimal.new("0.0")] + assert TestRepo.all(from p in Post, select: type(2 + ^"2", p.cost)) == [Decimal.new("4")] + assert TestRepo.all(from p in Post, select: type(2.0 + ^"2", p.cost)) == [Decimal.new("4.0")] + end + + @tag :decimal_type + test "typed aggregations" do + decimal = Decimal.new("1.0") + TestRepo.insert!(%Post{cost: decimal}) + + assert [1] = TestRepo.all(from p in Post, select: type(sum(p.cost), :integer)) + assert [1.0] = TestRepo.all(from p in Post, select: type(sum(p.cost), :float)) + assert [^decimal] = TestRepo.all(from p in Post, select: type(sum(p.cost), :decimal)) + end + + test "schemaless types" do + TestRepo.insert!(%Post{visits: 123}) + assert [123] = TestRepo.all(from p in "posts", select: type(p.visits, :integer)) + end + + test "schemaless calendar types" do + datetime = ~N[2014-01-16 20:26:51] + assert {1, _} = + TestRepo.insert_all("posts", [[inserted_at: datetime]]) + assert {1, _} = + TestRepo.update_all("posts", set: [inserted_at: datetime]) + assert [_] = + TestRepo.all(from p in "posts", where: p.inserted_at >= ^datetime, select: p.inserted_at) + assert [_] = + TestRepo.all(from p in "posts", where: p.inserted_at in [^datetime], select: p.inserted_at) + assert [_] = + TestRepo.all(from p in "posts", where: p.inserted_at in ^[datetime], select: p.inserted_at) + end +end diff --git a/integration/mssql/ecto/cases/windows.exs b/integration/mssql/ecto/cases/windows.exs new file mode 100644 index 0000000..f370eb0 --- /dev/null +++ b/integration/mssql/ecto/cases/windows.exs @@ -0,0 +1,53 @@ +defmodule Ecto.Integration.WindowsTest do + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) + + alias Ecto.Integration.TestRepo + import Ecto.Query + + alias Ecto.Integration.{Comment, User, Post} + + test "over" do + u1 = TestRepo.insert!(%User{name: "Tester"}) + u2 = TestRepo.insert!(%User{name: "Developer"}) + c1 = TestRepo.insert!(%Comment{text: "1", author_id: u1.id}) + c2 = TestRepo.insert!(%Comment{text: "2", author_id: u1.id}) + c3 = TestRepo.insert!(%Comment{text: "3", author_id: u1.id}) + c4 = TestRepo.insert!(%Comment{text: "4", author_id: u2.id}) + + # Over nothing + query = from(c in Comment, select: [c, count(c.id) |> over()]) + assert [[^c1, 4], [^c2, 4], [^c3, 4], [^c4, 4]] = TestRepo.all(query) + + # Over partition + query = from(c in Comment, select: [c, count(c.id) |> over(partition_by: c.author_id)]) + assert [[^c1, 3], [^c2, 3], [^c3, 3], [^c4, 1]] = TestRepo.all(query) + + # Over window + query = from(c in Comment, windows: [w: [partition_by: c.author_id]], select: [c, count(c.id) |> over(:w)]) + assert [[^c1, 3], [^c2, 3], [^c3, 3], [^c4, 1]] = TestRepo.all(query) + end + + test "frame" do + posts = Enum.map(0..6, &%{counter: &1, visits: round(:math.pow(2, &1))}) + 
TestRepo.insert_all(Post, posts) + + n = 1 + query = from(p in Post, + windows: [w: [order_by: p.counter, frame: fragment("ROWS BETWEEN ? PRECEDING AND ? FOLLOWING", ^n, ^n)]], + select: [p.counter, sum(p.visits) |> over(:w)] + ) + assert [[0, 3], [1, 7], [2, 14], [3, 28], [4, 56], [5, 112], [6, 96]] = TestRepo.all(query) + + query = from(p in Post, + windows: [w: [order_by: p.counter, frame: fragment("ROWS BETWEEN 1 FOLLOWING AND UNBOUNDED FOLLOWING")]], + select: [p.counter, sum(p.visits) |> over(:w)] + ) + assert [[0, 126], [1, 124], [2, 120], [3, 112], [4, 96], [5, 64], [6, nil]] = TestRepo.all(query) + + query = from(p in Post, + windows: [w: [order_by: p.counter, frame: fragment("ROWS CURRENT ROW")]], + select: [p.counter, sum(p.visits) |> over(:w)] + ) + assert [[0, 1], [1, 2], [2, 4], [3, 8], [4, 16], [5, 32], [6, 64]] = TestRepo.all(query) + end +end diff --git a/integration/mssql/ecto/support/schemas.exs b/integration/mssql/ecto/support/schemas.exs new file mode 100644 index 0000000..6fd2f47 --- /dev/null +++ b/integration/mssql/ecto/support/schemas.exs @@ -0,0 +1,332 @@ +defmodule Ecto.Integration.Schema do + defmacro __using__(_) do + quote do + use Ecto.Schema + type = + Application.get_env(:ecto, :primary_key_type) || + raise ":primary_key_type not set in :ecto application" + @primary_key {:id, type, autogenerate: true} + @foreign_key_type type + end + end +end + +defmodule PrefixedString do + use Ecto.Type + def type(), do: :string + def cast(string), do: {:ok, string} + def load(string), do: {:ok, "PREFIX-" <> string} + def dump("PREFIX-" <> string), do: {:ok, string} + def dump(_string), do: :error + def embed_as(_), do: :dump +end + +defmodule Ecto.Integration.Post do + @moduledoc """ + This module is used to test: + + * Overall functionality + * Overall types + * Non-null timestamps + * Relationships + * Dependent callbacks + + """ + use Ecto.Integration.Schema + import Ecto.Changeset + + schema "posts" do + field :counter, :id # Same as integer + field :title, :string + field :text, :binary + field :temp, :string, default: "temp", virtual: true + field :public, :boolean, default: true + field :cost, :decimal + field :visits, :integer + field :intensity, :float + field :bid, :binary_id + field :uuid, Ecto.Integration.TestRepo.uuid(), autogenerate: true + field :meta, :map + field :links, {:map, :string} + field :intensities, {:map, :float} + field :posted, :date + has_many :comments, Ecto.Integration.Comment, on_delete: :delete_all, on_replace: :delete + # The post<->permalink relationship should be marked as uniq + has_one :permalink, Ecto.Integration.Permalink, on_delete: :delete_all, on_replace: :delete + has_one :update_permalink, Ecto.Integration.Permalink, foreign_key: :post_id, on_delete: :delete_all, on_replace: :update + has_many :comments_authors, through: [:comments, :author] + belongs_to :author, Ecto.Integration.User + many_to_many :users, Ecto.Integration.User, + join_through: "posts_users", on_delete: :delete_all, on_replace: :delete + many_to_many :unique_users, Ecto.Integration.User, + join_through: "posts_users", unique: true + many_to_many :constraint_users, Ecto.Integration.User, + join_through: Ecto.Integration.PostUserCompositePk + has_many :users_comments, through: [:users, :comments] + has_many :comments_authors_permalinks, through: [:comments_authors, :permalink] + has_one :post_user_composite_pk, Ecto.Integration.PostUserCompositePk + timestamps() + end + + def changeset(schema, params) do + cast(schema, params, ~w(counter title text temp public 
cost visits
+                            intensity bid uuid meta posted)a)
+  end
+end
+
+defmodule Ecto.Integration.Comment do
+  @moduledoc """
+  This module is used to test:
+
+    * Optimistic lock
+    * Relationships
+    * Dependent callbacks
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "comments" do
+    field :text, :string
+    field :lock_version, :integer, default: 1
+    belongs_to :post, Ecto.Integration.Post
+    belongs_to :author, Ecto.Integration.User
+    has_one :post_permalink, through: [:post, :permalink]
+  end
+
+  def changeset(schema, params) do
+    Ecto.Changeset.cast(schema, params, [:text])
+  end
+end
+
+defmodule Ecto.Integration.Permalink do
+  @moduledoc """
+  This module is used to test:
+
+    * Field sources
+    * Relationships
+    * Dependent callbacks
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "permalinks" do
+    field :url, :string, source: :uniform_resource_locator
+    belongs_to :post, Ecto.Integration.Post, on_replace: :nilify
+    belongs_to :update_post, Ecto.Integration.Post, on_replace: :update, foreign_key: :post_id, define_field: false
+    belongs_to :user, Ecto.Integration.User
+    has_many :post_comments_authors, through: [:post, :comments_authors]
+  end
+
+  def changeset(schema, params) do
+    Ecto.Changeset.cast(schema, params, [:url])
+  end
+end
+
+defmodule Ecto.Integration.PostUser do
+  @moduledoc """
+  This module is used to test:
+
+    * Many to many associations join_through with schema
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "posts_users_pk" do
+    belongs_to :user, Ecto.Integration.User
+    belongs_to :post, Ecto.Integration.Post
+    timestamps()
+  end
+end
+
+defmodule Ecto.Integration.User do
+  @moduledoc """
+  This module is used to test:
+
+    * UTC Timestamps
+    * Relationships
+    * Dependent callbacks
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "users" do
+    field :name, :string
+    has_many :comments, Ecto.Integration.Comment, foreign_key: :author_id, on_delete: :nilify_all, on_replace: :nilify
+    has_one :permalink, Ecto.Integration.Permalink, on_replace: :nilify
+    has_many :posts, Ecto.Integration.Post, foreign_key: :author_id, on_delete: :nothing, on_replace: :delete
+    belongs_to :custom, Ecto.Integration.Custom, references: :bid, type: :binary_id
+    many_to_many :schema_posts, Ecto.Integration.Post, join_through: Ecto.Integration.PostUser
+    many_to_many :unique_posts, Ecto.Integration.Post, join_through: Ecto.Integration.PostUserCompositePk
+    timestamps(type: :utc_datetime)
+  end
+end
+
+defmodule Ecto.Integration.Custom do
+  @moduledoc """
+  This module is used to test:
+
+    * binary_id primary key
+    * Tying another schema to an existing schema
+
+  Due to the second item, it must be a subset of posts.
+ """ + use Ecto.Integration.Schema + + @primary_key {:bid, :binary_id, autogenerate: true} + schema "customs" do + field :uuid, Ecto.Integration.TestRepo.uuid() + many_to_many :customs, Ecto.Integration.Custom, + join_through: "customs_customs", join_keys: [custom_id1: :bid, custom_id2: :bid], + on_delete: :delete_all, on_replace: :delete + end +end + +defmodule Ecto.Integration.Barebone do + @moduledoc """ + This module is used to test: + + * A schema without primary keys + + """ + use Ecto.Integration.Schema + + @primary_key false + schema "barebones" do + field :num, :integer + end +end + +defmodule Ecto.Integration.Tag do + @moduledoc """ + This module is used to test: + + * The array type + * Embedding many schemas (uses array) + + """ + use Ecto.Integration.Schema + + schema "tags" do + field :ints, {:array, :integer} + field :uuids, {:array, Ecto.Integration.TestRepo.uuid()} + embeds_many :items, Ecto.Integration.Item + end +end + +defmodule Ecto.Integration.Item do + @moduledoc """ + This module is used to test: + + * Embedding + + """ + use Ecto.Schema + + embedded_schema do + field :reference, PrefixedString + field :price, :integer + field :valid_at, :date + + embeds_one :primary_color, Ecto.Integration.ItemColor + embeds_many :secondary_colors, Ecto.Integration.ItemColor + end +end + +defmodule Ecto.Integration.ItemColor do + @moduledoc """ + This module is used to test: + + * Nested embeds + + """ + use Ecto.Schema + + embedded_schema do + field :name, :string + end +end + +defmodule Ecto.Integration.Order do + @moduledoc """ + This module is used to test: + + * Text columns + * Embedding one schema + + """ + use Ecto.Integration.Schema + + schema "orders" do + field :instructions, :string + embeds_one :item, Ecto.Integration.Item + belongs_to :permalink, Ecto.Integration.Permalink + end +end + +defmodule Ecto.Integration.CompositePk do + @moduledoc """ + This module is used to test: + + * Composite primary keys + + """ + use Ecto.Integration.Schema + import Ecto.Changeset + + @primary_key false + schema "composite_pk" do + field :a, :integer, primary_key: true + field :b, :integer, primary_key: true + field :name, :string + end + def changeset(schema, params) do + cast(schema, params, ~w(a b name)a) + end +end + +defmodule Ecto.Integration.CorruptedPk do + @moduledoc """ + This module is used to test: + + * Primary keys that is not unique on a DB side + + """ + use Ecto.Integration.Schema + + @primary_key false + schema "corrupted_pk" do + field :a, :string, primary_key: true + end +end + +defmodule Ecto.Integration.PostUserCompositePk do + @moduledoc """ + This module is used to test: + + * Composite primary keys for 2 belongs_to fields + + """ + use Ecto.Integration.Schema + + @primary_key false + schema "posts_users_composite_pk" do + belongs_to :user, Ecto.Integration.User, primary_key: true + belongs_to :post, Ecto.Integration.Post, primary_key: true + timestamps() + end +end + +defmodule Ecto.Integration.Usec do + @moduledoc """ + This module is used to test: + + * usec datetime types + + """ + use Ecto.Integration.Schema + + schema "usecs" do + field :naive_datetime_usec, :naive_datetime_usec + field :utc_datetime_usec, :utc_datetime_usec + end +end diff --git a/integration/mssql/support/types.exs b/integration/mssql/ecto/support/types.exs similarity index 94% rename from integration/mssql/support/types.exs rename to integration/mssql/ecto/support/types.exs index 35f4197..f71ac15 100644 --- a/integration/mssql/support/types.exs +++ 
b/integration/mssql/ecto/support/types.exs @@ -4,7 +4,7 @@ defmodule Custom.Permalink do def cast(string) when is_binary(string) do case Integer.parse(string) do {int, _} -> {:ok, int} - :error -> :error + :error -> :error end end diff --git a/integration/mssql/ecto_sql/sql/alter.exs b/integration/mssql/ecto_sql/sql/alter.exs new file mode 100644 index 0000000..cbaaaaf --- /dev/null +++ b/integration/mssql/ecto_sql/sql/alter.exs @@ -0,0 +1,90 @@ +defmodule Ecto.Integration.AlterTest do + use Ecto.Integration.Case, async: false + + alias Ecto.Integration.PoolRepo + + defmodule AlterMigrationOne do + use Ecto.Migration + + def up do + create table(:alter_col_type) do + add :value, :integer + end + + execute "INSERT INTO alter_col_type (value) VALUES (1)" + end + + def down do + drop table(:alter_col_type) + end + end + + defmodule AlterMigrationTwo do + use Ecto.Migration + + def up do + alter table(:alter_col_type) do + modify :value, :numeric + end + end + + def down do + alter table(:alter_col_type) do + modify :value, :integer + end + end + end + + import Ecto.Query, only: [from: 1, from: 2] + + defp run(direction, repo, module) do + Ecto.Migration.Runner.run(repo, 1, module, :forward, direction, direction, log: false) + end + + test "reset cache on returning query after alter column type" do + values = from v in "alter_col_type", select: v.value + + assert :ok == run(:up, PoolRepo, AlterMigrationOne) + assert PoolRepo.all(values) == [1] + + assert :ok == run(:up, PoolRepo, AlterMigrationTwo) + [%Decimal{}] = PoolRepo.all(values) + + PoolRepo.transaction(fn() -> + assert [%Decimal{}] = PoolRepo.all(values) + assert :ok == run(:down, PoolRepo, AlterMigrationTwo) + + # Optionally fail once with database error when + # already prepared on connection (and clear cache) + try do + PoolRepo.all(values, [mode: :savepoint]) + rescue + _ -> + assert PoolRepo.all(values) == [1] + else + result -> + assert result == [1] + end + end) + after + assert :ok == run(:down, PoolRepo, AlterMigrationOne) + end + + test "reset cache on parameterised query after alter column type" do + values = from v in "alter_col_type" + + assert :ok == run(:up, PoolRepo, AlterMigrationOne) + assert PoolRepo.update_all(values, [set: [value: 2]]) == {1, nil} + + assert :ok == run(:up, PoolRepo, AlterMigrationTwo) + assert PoolRepo.update_all(values, [set: [value: 3]]) == {1, nil} + + PoolRepo.transaction(fn() -> + assert PoolRepo.update_all(values, [set: [value: Decimal.new(5)]]) == {1, nil} + assert :ok == run(:down, PoolRepo, AlterMigrationTwo) + assert PoolRepo.update_all(values, [set: [value: 6]]) == {1, nil} + end) + after + assert :ok == run(:down, PoolRepo, AlterMigrationOne) + end +end diff --git a/integration/mssql/sql/lock.exs b/integration/mssql/ecto_sql/sql/lock.exs similarity index 67% rename from integration/mssql/sql/lock.exs rename to integration/mssql/ecto_sql/sql/lock.exs index 12927db..eb99ad5 100644 --- a/integration/mssql/sql/lock.exs +++ b/integration/mssql/ecto_sql/sql/lock.exs @@ -10,7 +10,7 @@ defmodule Ecto.Integration.LockTest do use Ecto.Schema schema "lock_counters" do - field(:count, :integer) + field :count, :integer end end @@ -24,8 +24,8 @@ defmodule Ecto.Integration.LockTest do pid = self() lock_for_update = - Application.get_env(:ecto, :lock_for_update) || - raise ":lock_for_update not set in :ecto application" + Application.get_env(:ecto_sql, :lock_for_update) || + raise ":lock_for_update not set in :ecto application" # Here we are manually inserting the lock in the query # to test multiple 
adapters. Never do this in actual @@ -34,26 +34,21 @@ defmodule Ecto.Integration.LockTest do query = %{query | lock: lock_for_update} {:ok, new_pid} = - Task.start_link(fn -> + Task.start_link fn -> assert_receive :select_for_update, 5000 PoolRepo.transaction(fn -> - # this should block until the other trans. commit - [post] = PoolRepo.all(query) - - post |> Ecto.Changeset.change(count: post.count + 1) - |> PoolRepo.update!() + [post] = PoolRepo.all(query) # this should block until the other trans. commit + post |> Ecto.Changeset.change(count: post.count + 1) |> PoolRepo.update! end) - send(pid, :updated) - end) + send pid, :updated + end PoolRepo.transaction(fn -> - # select and lock the row - [post] = PoolRepo.all(query) - # signal second process to begin a transaction - send(new_pid, :select_for_update) - post |> Ecto.Changeset.change(count: post.count + 1) |> PoolRepo.update!() + [post] = PoolRepo.all(query) # select and lock the row + send new_pid, :select_for_update # signal second process to begin a transaction + post |> Ecto.Changeset.change(count: post.count + 1) |> PoolRepo.update! end) assert_receive :updated, 5000 diff --git a/integration/mssql/ecto_sql/sql/migration.exs b/integration/mssql/ecto_sql/sql/migration.exs new file mode 100644 index 0000000..2a55a8f --- /dev/null +++ b/integration/mssql/ecto_sql/sql/migration.exs @@ -0,0 +1,476 @@ +defmodule Ecto.Integration.MigrationTest do + use ExUnit.Case, async: true + + alias Ecto.Integration.{TestRepo, PoolRepo} + + defmodule CreateMigration do + use Ecto.Migration + + @table table(:create_table_migration) + @index index(:create_table_migration, [:value], unique: true) + + def up do + create @table do + add :value, :integer + end + create @index + end + + def down do + drop @index + drop @table + end + end + + defmodule AddColumnMigration do + use Ecto.Migration + + def up do + create table(:add_col_migration) do + add :value, :integer + end + + alter table(:add_col_migration) do + add :to_be_added, :integer + end + + execute "INSERT INTO add_col_migration (value, to_be_added) VALUES (1, 2)" + end + + def down do + drop table(:add_col_migration) + end + end + + defmodule AlterColumnMigration do + use Ecto.Migration + + def up do + create table(:alter_col_migration) do + add :from_null_to_not_null, :integer + add :from_not_null_to_null, :integer, null: false + + add :from_default_to_no_default, :integer, default: 0 + add :from_no_default_to_default, :integer + end + + alter table(:alter_col_migration) do + modify :from_null_to_not_null, :string, null: false + modify :from_not_null_to_null, :string, null: true + + modify :from_default_to_no_default, :integer, default: nil + modify :from_no_default_to_default, :integer, default: 0 + end + + execute "INSERT INTO alter_col_migration (from_null_to_not_null) VALUES ('foo')" + end + + def down do + drop table(:alter_col_migration) + end + end + + defmodule AlterColumnFromMigration do + use Ecto.Migration + + def change do + create table(:modify_from_authors, primary_key: false) do + add :id, :integer, primary_key: true + end + create table(:modify_from_posts) do + add :author_id, references(:modify_from_authors, type: :integer) + end + + if direction() == :up do + flush() + PoolRepo.insert_all "modify_from_authors", [[id: 1]] + PoolRepo.insert_all "modify_from_posts", [[author_id: 1]] + end + + alter table(:modify_from_posts) do + # remove the constraints modify_from_posts_author_id_fkey + modify :author_id, :integer, from: references(:modify_from_authors, type: :integer) + end + 
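+    # With the old foreign key constraint dropped above, the referenced
+    # primary key can be widened before the constraint is re-added below.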
alter table(:modify_from_authors) do + modify :id, :bigint, from: :integer + end + alter table(:modify_from_posts) do + # add the constraints modify_from_posts_author_id_fkey + modify :author_id, references(:modify_from_authors, type: :bigint), from: :integer + end + end + end + + defmodule AlterForeignKeyOnDeleteMigration do + use Ecto.Migration + + def up do + create table(:alter_fk_users) + + create table(:alter_fk_posts) do + add :alter_fk_user_id, :id + end + + alter table(:alter_fk_posts) do + modify :alter_fk_user_id, references(:alter_fk_users, on_delete: :nilify_all) + end + + execute "INSERT INTO alter_fk_users (id) VALUES ('1')" + execute "INSERT INTO alter_fk_posts (id, alter_fk_user_id) VALUES ('1', '1')" + execute "DELETE FROM alter_fk_users" + end + + def down do + drop table(:alter_fk_posts) + drop table(:alter_fk_users) + end + end + + defmodule AlterForeignKeyOnUpdateMigration do + use Ecto.Migration + + def up do + create table(:alter_fk_users) + + create table(:alter_fk_posts) do + add :alter_fk_user_id, :id + end + + alter table(:alter_fk_posts) do + modify :alter_fk_user_id, references(:alter_fk_users, on_update: :update_all) + end + + execute "INSERT INTO alter_fk_users (id) VALUES ('1')" + execute "INSERT INTO alter_fk_posts (id, alter_fk_user_id) VALUES ('1', '1')" + execute "UPDATE alter_fk_users SET id = '2'" + end + + def down do + drop table(:alter_fk_posts) + drop table(:alter_fk_users) + end + end + + defmodule DropColumnMigration do + use Ecto.Migration + + def up do + create table(:drop_col_migration) do + add :value, :integer + add :to_be_removed, :integer + end + + execute "INSERT INTO drop_col_migration (value, to_be_removed) VALUES (1, 2)" + + alter table(:drop_col_migration) do + remove :to_be_removed + end + end + + def down do + drop table(:drop_col_migration) + end + end + + defmodule RenameColumnMigration do + use Ecto.Migration + + def up do + create table(:rename_col_migration) do + add :to_be_renamed, :integer + end + + rename table(:rename_col_migration), :to_be_renamed, to: :was_renamed + + execute "INSERT INTO rename_col_migration (was_renamed) VALUES (1)" + end + + def down do + drop table(:rename_col_migration) + end + end + + defmodule OnDeleteMigration do + use Ecto.Migration + + def up do + create table(:parent1) + create table(:parent2) + + create table(:ref_migration) do + add :parent1, references(:parent1, on_delete: :nilify_all) + end + + alter table(:ref_migration) do + add :parent2, references(:parent2, on_delete: :delete_all) + end + end + + def down do + drop table(:ref_migration) + drop table(:parent1) + drop table(:parent2) + end + end + + defmodule ReferencesRollbackMigration do + use Ecto.Migration + + def change do + create table(:parent) do + add :name, :string + end + + create table(:child) do + add :parent_id, references(:parent) + end + end + end + + defmodule RenameMigration do + use Ecto.Migration + + @table_current table(:posts_migration) + @table_new table(:new_posts_migration) + + def up do + create @table_current + rename @table_current, to: @table_new + end + + def down do + drop @table_new + end + end + + defmodule PrefixMigration do + use Ecto.Migration + + @prefix "ecto_prefix_test" + + def up do + execute TestRepo.create_prefix(@prefix) + create table(:first, prefix: @prefix) + create table(:second, prefix: @prefix) do + add :first_id, references(:first) + end + end + + def down do + drop table(:second, prefix: @prefix) + drop table(:first, prefix: @prefix) + execute TestRepo.drop_prefix(@prefix) + end + end + 
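+  # This migration passes a keyword list in :options, which SQL adapters
+  # do not support; the "raises on NoSQL migrations" test below asserts
+  # that running it fails with an ArgumentError.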
+ defmodule NoSQLMigration do + use Ecto.Migration + + def up do + create table(:collection, options: [capped: true]) + execute create: "collection" + end + end + + defmodule Parent do + use Ecto.Schema + + schema "parent" do + end + end + + defmodule NoErrorTableMigration do + use Ecto.Migration + + def change do + create_if_not_exists table(:existing) do + add :name, :string + end + + create_if_not_exists table(:existing) do + add :name, :string + end + + create_if_not_exists table(:existing) + + drop_if_exists table(:existing) + drop_if_exists table(:existing) + end + end + + defmodule NoErrorIndexMigration do + use Ecto.Migration + + def change do + create_if_not_exists index(:posts, [:title]) + create_if_not_exists index(:posts, [:title]) + drop_if_exists index(:posts, [:title]) + drop_if_exists index(:posts, [:title]) + end + end + + defmodule InferredDropIndexMigration do + use Ecto.Migration + + def change do + create index(:posts, [:title]) + end + end + + defmodule AlterPrimaryKeyMigration do + use Ecto.Migration + + def change do + create table(:no_pk, primary_key: false) do + add :dummy, :string + end + alter table(:no_pk) do + add :id, :serial, primary_key: true + end + end + end + + import Ecto.Query, only: [from: 2] + import Ecto.Migrator, only: [up: 4, down: 4] + + # Avoid migration out of order warnings + @moduletag :capture_log + @base_migration 1_000_000 + + setup do + {:ok, migration_number: System.unique_integer([:positive]) + @base_migration} + end + + test "create and drop table and indexes", %{migration_number: num} do + assert :ok == up(PoolRepo, num, CreateMigration, log: false) + assert :ok == down(PoolRepo, num, CreateMigration, log: false) + end + + test "correctly infers how to drop index", %{migration_number: num} do + assert :ok == up(PoolRepo, num, InferredDropIndexMigration, log: false) + assert :ok == down(PoolRepo, num, InferredDropIndexMigration, log: false) + end + + test "supports references", %{migration_number: num} do + assert :ok == up(PoolRepo, num, OnDeleteMigration, log: false) + + parent1 = PoolRepo.insert! Ecto.put_meta(%Parent{}, source: "parent1") + parent2 = PoolRepo.insert! 
Ecto.put_meta(%Parent{}, source: "parent2") + + writer = "INSERT INTO ref_migration (parent1, parent2) VALUES (#{parent1.id}, #{parent2.id})" + PoolRepo.query!(writer) + + reader = from r in "ref_migration", select: {r.parent1, r.parent2} + assert PoolRepo.all(reader) == [{parent1.id, parent2.id}] + + PoolRepo.delete!(parent1) + assert PoolRepo.all(reader) == [{nil, parent2.id}] + + PoolRepo.delete!(parent2) + assert PoolRepo.all(reader) == [] + + assert :ok == down(PoolRepo, num, OnDeleteMigration, log: false) + end + + test "rolls back references in change/1", %{migration_number: num} do + assert :ok == up(PoolRepo, num, ReferencesRollbackMigration, log: false) + assert :ok == down(PoolRepo, num, ReferencesRollbackMigration, log: false) + end + + test "create table if not exists and drop table if exists does not raise on failure", %{migration_number: num} do + assert :ok == up(PoolRepo, num, NoErrorTableMigration, log: false) + end + + @tag :create_index_if_not_exists + test "create index if not exists and drop index if exists does not raise on failure", %{migration_number: num} do + assert :ok == up(PoolRepo, num, NoErrorIndexMigration, log: false) + end + + test "raises on NoSQL migrations", %{migration_number: num} do + assert_raise ArgumentError, ~r"does not support keyword lists in :options", fn -> + up(PoolRepo, num, NoSQLMigration, log: false) + end + end + + @tag :add_column + test "add column", %{migration_number: num} do + assert :ok == up(PoolRepo, num, AddColumnMigration, log: false) + assert [2] == PoolRepo.all from p in "add_col_migration", select: p.to_be_added + :ok = down(PoolRepo, num, AddColumnMigration, log: false) + end + + @tag :modify_column + test "modify column", %{migration_number: num} do + assert :ok == up(PoolRepo, num, AlterColumnMigration, log: false) + + assert ["foo"] == + PoolRepo.all from p in "alter_col_migration", select: p.from_null_to_not_null + assert [nil] == + PoolRepo.all from p in "alter_col_migration", select: p.from_not_null_to_null + assert [nil] == + PoolRepo.all from p in "alter_col_migration", select: p.from_default_to_no_default + assert [0] == + PoolRepo.all from p in "alter_col_migration", select: p.from_no_default_to_default + + query = "INSERT INTO alter_col_migration (from_not_null_to_null) VALUES ('foo')" + assert catch_error(PoolRepo.query!(query)) + + :ok = down(PoolRepo, num, AlterColumnMigration, log: false) + end + + @tag :modify_column_with_from + test "modify column with from", %{migration_number: num} do + assert :ok == up(PoolRepo, num, AlterColumnFromMigration, log: false) + + assert [1] == + PoolRepo.all from p in "modify_from_posts", select: p.author_id + + :ok = down(PoolRepo, num, AlterColumnFromMigration, log: false) + end + + @tag :modify_foreign_key_on_delete + test "modify foreign key's on_delete constraint", %{migration_number: num} do + assert :ok == up(PoolRepo, num, AlterForeignKeyOnDeleteMigration, log: false) + assert [nil] == PoolRepo.all from p in "alter_fk_posts", select: p.alter_fk_user_id + :ok = down(PoolRepo, num, AlterForeignKeyOnDeleteMigration, log: false) + end + + @tag :modify_foreign_key_on_update + test "modify foreign key's on_update constraint", %{migration_number: num} do + assert :ok == up(PoolRepo, num, AlterForeignKeyOnUpdateMigration, log: false) + assert [2] == PoolRepo.all from p in "alter_fk_posts", select: p.alter_fk_user_id + :ok = down(PoolRepo, num, AlterForeignKeyOnUpdateMigration, log: false) + end + + @tag :remove_column + test "remove column", %{migration_number: num} do + 
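+    # Once the migration removes the column, selecting it must raise a
+    # database error, which catch_error below asserts.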
assert :ok == up(PoolRepo, num, DropColumnMigration, log: false) + assert catch_error(PoolRepo.all from p in "drop_col_migration", select: p.to_be_removed) + :ok = down(PoolRepo, num, DropColumnMigration, log: false) + end + + @tag :rename_column + test "rename column", %{migration_number: num} do + assert :ok == up(PoolRepo, num, RenameColumnMigration, log: false) + assert [1] == PoolRepo.all from p in "rename_col_migration", select: p.was_renamed + :ok = down(PoolRepo, num, RenameColumnMigration, log: false) + end + + @tag :rename_table + test "rename table", %{migration_number: num} do + assert :ok == up(PoolRepo, num, RenameMigration, log: false) + assert :ok == down(PoolRepo, num, RenameMigration, log: false) + end + + @tag :prefix + test "prefix", %{migration_number: num} do + assert :ok == up(PoolRepo, num, PrefixMigration, log: false) + assert :ok == down(PoolRepo, num, PrefixMigration, log: false) + end + + @tag :alter_primary_key + test "alter primary key", %{migration_number: num} do + assert :ok == up(PoolRepo, num, AlterPrimaryKeyMigration, log: false) + assert :ok == down(PoolRepo, num, AlterPrimaryKeyMigration, log: false) + end +end diff --git a/integration/mssql/ecto_sql/sql/migrator.exs b/integration/mssql/ecto_sql/sql/migrator.exs new file mode 100644 index 0000000..e9d2800 --- /dev/null +++ b/integration/mssql/ecto_sql/sql/migrator.exs @@ -0,0 +1,242 @@ +Code.require_file "../support/file_helpers.exs", __DIR__ + +defmodule Ecto.Integration.MigratorTest do + use Ecto.Integration.Case + + import Support.FileHelpers + import ExUnit.CaptureLog + import Ecto.Migrator + + alias Ecto.Integration.{TestRepo, PoolRepo} + alias Ecto.Migration.SchemaMigration + + setup config do + Process.register(self(), config.test) + PoolRepo.delete_all(SchemaMigration) + :ok + end + + defmodule AnotherSchemaMigration do + use Ecto.Migration + + def change do + execute TestRepo.create_prefix("bad_schema_migrations"), + TestRepo.drop_prefix("bad_schema_migrations") + + create table(:schema_migrations, prefix: "bad_schema_migrations") do + add :version, :string + add :inserted_at, :integer + end + end + end + + defmodule BrokenLinkMigration do + use Ecto.Migration + + def change do + Task.start_link(fn -> raise "oops" end) + Process.sleep(:infinity) + end + end + + defmodule GoodMigration do + use Ecto.Migration + + def up do + create table(:good_migration) + end + + def down do + drop table(:good_migration) + end + end + + defmodule BadMigration do + use Ecto.Migration + + def change do + execute "CREATE WHAT" + end + end + + test "migrations up and down" do + assert migrated_versions(PoolRepo) == [] + assert up(PoolRepo, 31, GoodMigration, log: false) == :ok + + [migration] = PoolRepo.all(SchemaMigration) + assert migration.version == 31 + assert migration.inserted_at + + assert migrated_versions(PoolRepo) == [31] + assert up(PoolRepo, 31, GoodMigration, log: false) == :already_up + assert migrated_versions(PoolRepo) == [31] + assert down(PoolRepo, 32, GoodMigration, log: false) == :already_down + assert migrated_versions(PoolRepo) == [31] + assert down(PoolRepo, 31, GoodMigration, log: false) == :ok + assert migrated_versions(PoolRepo) == [] + end + + test "does not commit migration if insert into schema migration fails" do + # First we create a new schema migration table in another prefix + assert up(PoolRepo, 33, AnotherSchemaMigration, log: false) == :ok + assert migrated_versions(PoolRepo) == [33] + + assert capture_log(fn -> + catch_error(up(PoolRepo, 34, GoodMigration, log: false, prefix: 
"bad_schema_migrations")) + catch_error(PoolRepo.all("good_migration")) + catch_error(PoolRepo.all("good_migration", prefix: "bad_schema_migrations")) + end) =~ "Could not update schema migrations" + + assert down(PoolRepo, 33, AnotherSchemaMigration, log: false) == :ok + end + + test "bad execute migration" do + assert catch_error(up(PoolRepo, 31, BadMigration, log: false)) + end + + test "broken link migration" do + Process.flag(:trap_exit, true) + + assert capture_log(fn -> + {:ok, pid} = Task.start_link(fn -> up(PoolRepo, 31, BrokenLinkMigration, log: false) end) + assert_receive {:EXIT, ^pid, _} + end) =~ "oops" + + assert capture_log(fn -> + catch_exit(up(PoolRepo, 31, BrokenLinkMigration, log: false)) + end) =~ "oops" + end + + test "run up to/step migration", config do + in_tmp fn path -> + create_migration(47, config) + create_migration(48, config) + + assert [47] = run(PoolRepo, path, :up, step: 1, log: false) + assert count_entries() == 1 + + assert [48] = run(PoolRepo, path, :up, to: 48, log: false) + end + end + + test "run down to/step migration", config do + in_tmp fn path -> + migrations = [ + create_migration(49, config), + create_migration(50, config), + ] + + assert [49, 50] = run(PoolRepo, path, :up, all: true, log: false) + purge migrations + + assert [50] = run(PoolRepo, path, :down, step: 1, log: false) + purge migrations + + assert count_entries() == 1 + assert [50] = run(PoolRepo, path, :up, to: 50, log: false) + end + end + + test "runs all migrations", config do + in_tmp fn path -> + migrations = [ + create_migration(53, config), + create_migration(54, config), + ] + + assert [53, 54] = run(PoolRepo, path, :up, all: true, log: false) + assert [] = run(PoolRepo, path, :up, all: true, log: false) + purge migrations + + assert [54, 53] = run(PoolRepo, path, :down, all: true, log: false) + purge migrations + + assert count_entries() == 0 + assert [53, 54] = run(PoolRepo, path, :up, all: true, log: false) + end + end + + test "does not commit half transactions on bad syntax", config do + in_tmp fn path -> + migrations = [ + create_migration(64, config), + create_migration("65_+", config) + ] + + assert_raise SyntaxError, fn -> + run(PoolRepo, path, :up, all: true, log: false) + end + + refute_received {:up, _} + assert count_entries() == 0 + purge migrations + end + end + + test "raises when connection pool is too small" do + config = Application.fetch_env!(:ecto_sql, PoolRepo) + config = Keyword.merge(config, pool_size: 1) + Application.put_env(:ecto_sql, __MODULE__.SingleConnectionRepo, config) + + defmodule SingleConnectionRepo do + use Ecto.Repo, otp_app: :ecto_sql, adapter: PoolRepo.__adapter__ + end + + {:ok, _pid} = SingleConnectionRepo.start_link + + in_tmp fn path -> + exception_message = ~r/Migrations failed to run because the connection pool size is less than 2/ + + assert_raise Ecto.MigrationError, exception_message, fn -> + run(SingleConnectionRepo, path, :up, all: true, log: false) + end + end + end + + test "does not raise when connection pool is too small but there is no lock" do + config = Application.fetch_env!(:ecto_sql, PoolRepo) + config = Keyword.merge(config, pool_size: 1, migration_lock: nil) + Application.put_env(:ecto_sql, __MODULE__.SingleConnectionNoLockRepo, config) + + defmodule SingleConnectionNoLockRepo do + use Ecto.Repo, otp_app: :ecto_sql, adapter: PoolRepo.__adapter__ + end + + {:ok, _pid} = SingleConnectionNoLockRepo.start_link + + in_tmp fn path -> + run(SingleConnectionNoLockRepo, path, :up, all: true, log: false) + end + end + + 
defp count_entries() do + PoolRepo.aggregate(SchemaMigration, :count, :version) + end + + defp create_migration(num, config) do + module = Module.concat(__MODULE__, "Migration#{num}") + + File.write! "#{num}_migration_#{num}.exs", """ + defmodule #{module} do + use Ecto.Migration + + def up do + send #{inspect config.test}, {:up, #{inspect num}} + end + + def down do + send #{inspect config.test}, {:down, #{inspect num}} + end + end + """ + + module + end + + defp purge(modules) do + Enum.each(List.wrap(modules), fn m -> + :code.delete m + :code.purge m + end) + end +end diff --git a/integration/mssql/ecto_sql/sql/stream.exs b/integration/mssql/ecto_sql/sql/stream.exs new file mode 100644 index 0000000..d88c464 --- /dev/null +++ b/integration/mssql/ecto_sql/sql/stream.exs @@ -0,0 +1,44 @@ +defmodule Ecto.Integration.StreamTest do + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) + + alias Ecto.Integration.TestRepo + alias Ecto.Integration.Post + alias Ecto.Integration.Comment + import Ecto.Query + + test "stream empty" do + assert {:ok, []} = TestRepo.transaction(fn() -> + TestRepo.stream(Post) + |> Enum.to_list() + end) + + assert {:ok, []} = TestRepo.transaction(fn() -> + TestRepo.stream(from p in Post) + |> Enum.to_list() + end) + end + + test "stream without schema" do + %Post{} = TestRepo.insert!(%Post{title: "title1"}) + %Post{} = TestRepo.insert!(%Post{title: "title2"}) + + assert {:ok, ["title1", "title2"]} = TestRepo.transaction(fn() -> + TestRepo.stream(from(p in "posts", order_by: p.title, select: p.title)) + |> Enum.to_list() + end) + end + + test "stream with assoc" do + p1 = TestRepo.insert!(%Post{title: "1"}) + + %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: p1.id}) + %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id}) + + stream = TestRepo.stream(Ecto.assoc(p1, :comments)) + assert {:ok, [c1, c2]} = TestRepo.transaction(fn() -> + Enum.to_list(stream) + end) + assert c1.id == cid1 + assert c2.id == cid2 + end +end diff --git a/integration/mssql/ecto_sql/sql/subquery.exs b/integration/mssql/ecto_sql/sql/subquery.exs new file mode 100644 index 0000000..cb15cae --- /dev/null +++ b/integration/mssql/ecto_sql/sql/subquery.exs @@ -0,0 +1,113 @@ +defmodule Ecto.Integration.SubQueryTest do + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) + + alias Ecto.Integration.TestRepo + import Ecto.Query + alias Ecto.Integration.Post + alias Ecto.Integration.Comment + + test "from: subqueries with select source" do + TestRepo.insert!(%Post{text: "hello", public: true}) + + query = from p in Post, select: p + assert ["hello"] = + TestRepo.all(from p in subquery(query), select: p.text) + assert [post] = + TestRepo.all(from p in subquery(query), select: p) + + assert %NaiveDateTime{} = post.inserted_at + assert post.__meta__.state == :loaded + end + + @tag :map_boolean_in_expression + test "from: subqueries with map and select expression" do + TestRepo.insert!(%Post{text: "hello", public: true}) + + query = from p in Post, select: %{text: p.text, pub: not p.public} + assert ["hello"] = + TestRepo.all(from p in subquery(query), select: p.text) + assert [%{text: "hello", pub: false}] = + TestRepo.all(from p in subquery(query), select: p) + assert [{"hello", %{text: "hello", pub: false}}] = + TestRepo.all(from p in subquery(query), select: {p.text, p}) + assert [{%{text: "hello", pub: false}, false}] = + TestRepo.all(from p in subquery(query), select: {p, p.pub}) + 
end + + @tag :map_boolean_in_expression + test "from: subqueries with map update and select expression" do + TestRepo.insert!(%Post{text: "hello", public: true}) + + query = from p in Post, select: %{p | public: not p.public} + assert ["hello"] = + TestRepo.all(from p in subquery(query), select: p.text) + assert [%Post{text: "hello", public: false}] = + TestRepo.all(from p in subquery(query), select: p) + assert [{"hello", %Post{text: "hello", public: false}}] = + TestRepo.all(from p in subquery(query), select: {p.text, p}) + assert [{%Post{text: "hello", public: false}, false}] = + TestRepo.all(from p in subquery(query), select: {p, p.public}) + end + + test "from: subqueries with map update on virtual field and select expression" do + TestRepo.insert!(%Post{text: "hello"}) + + query = from p in Post, select: %{p | temp: p.text} + assert ["hello"] = + TestRepo.all(from p in subquery(query), select: p.temp) + assert [%Post{text: "hello", temp: "hello"}] = + TestRepo.all(from p in subquery(query), select: p) + end + + test "from: subqueries with aggregates" do + TestRepo.insert!(%Post{visits: 10}) + TestRepo.insert!(%Post{visits: 11}) + TestRepo.insert!(%Post{visits: 13}) + + query = from p in Post, select: [:visits], order_by: [asc: :visits] + assert [13] = TestRepo.all(from p in subquery(query), select: max(p.visits)) + query = from p in Post, select: [:visits], order_by: [asc: :visits], limit: 2 + assert [11] = TestRepo.all(from p in subquery(query), select: max(p.visits)) + + query = from p in Post, order_by: [asc: :visits] + assert [13] = TestRepo.all(from p in subquery(query), select: max(p.visits)) + query = from p in Post, order_by: [asc: :visits], limit: 2 + assert [11] = TestRepo.all(from p in subquery(query), select: max(p.visits)) + end + + test "from: subqueries with parameters" do + TestRepo.insert!(%Post{visits: 10, text: "hello"}) + TestRepo.insert!(%Post{visits: 11, text: "hello"}) + TestRepo.insert!(%Post{visits: 13, text: "world"}) + + query = from p in Post, where: p.visits >= ^11 and p.visits <= ^13 + query = from p in subquery(query), where: p.text == ^"hello", select: fragment("? + ?", p.visits, ^1) + assert [12] = TestRepo.all(query) + end + + test "join: subqueries with select source" do + %{id: id} = TestRepo.insert!(%Post{text: "hello", public: true}) + TestRepo.insert!(%Comment{post_id: id}) + + query = from p in Post, select: p + assert ["hello"] = + TestRepo.all(from c in Comment, join: p in subquery(query), on: c.post_id == p.id, select: p.text) + assert [%Post{inserted_at: %NaiveDateTime{}}] = + TestRepo.all(from c in Comment, join: p in subquery(query), on: c.post_id == p.id, select: p) + end + + test "join: subqueries with parameters" do + TestRepo.insert!(%Post{visits: 10, text: "hello"}) + TestRepo.insert!(%Post{visits: 11, text: "hello"}) + TestRepo.insert!(%Post{visits: 13, text: "world"}) + TestRepo.insert!(%Comment{}) + TestRepo.insert!(%Comment{}) + + query = from p in Post, where: p.visits >= ^11 and p.visits <= ^13 + query = from c in Comment, + join: p in subquery(query), + where: p.text == ^"hello", + select: fragment("? 
+ ?", p.visits, ^1) + assert [12, 12] = TestRepo.all(query) + end +end diff --git a/integration/mssql/ecto_sql/sql/transaction.exs b/integration/mssql/ecto_sql/sql/transaction.exs new file mode 100644 index 0000000..48fba57 --- /dev/null +++ b/integration/mssql/ecto_sql/sql/transaction.exs @@ -0,0 +1,259 @@ +defmodule Ecto.Integration.TransactionTest do + # We can keep this test async as long as it + # is the only one access the transactions table + use Ecto.Integration.Case, async: true + + import Ecto.Query + alias Ecto.Integration.PoolRepo # Used for writes + alias Ecto.Integration.TestRepo # Used for reads + + @moduletag :capture_log + + defmodule UniqueError do + defexception message: "unique error" + end + + setup do + PoolRepo.delete_all "transactions" + :ok + end + + defmodule Trans do + use Ecto.Schema + + schema "transactions" do + field :num, :integer + end + end + + test "transaction returns value" do + refute PoolRepo.in_transaction? + {:ok, val} = PoolRepo.transaction(fn -> + assert PoolRepo.in_transaction? + {:ok, val} = + PoolRepo.transaction(fn -> + assert PoolRepo.in_transaction? + 42 + end) + assert PoolRepo.in_transaction? + val + end) + refute PoolRepo.in_transaction? + assert val == 42 + end + + test "transaction re-raises" do + assert_raise UniqueError, fn -> + PoolRepo.transaction(fn -> + PoolRepo.transaction(fn -> + raise UniqueError + end) + end) + end + end + + test "transaction commits" do + PoolRepo.transaction(fn -> + e = PoolRepo.insert!(%Trans{num: 1}) + assert [^e] = PoolRepo.all(Trans) + assert [] = TestRepo.all(Trans) + end) + + assert [%Trans{num: 1}] = PoolRepo.all(Trans) + end + + test "transaction rolls back" do + try do + PoolRepo.transaction(fn -> + e = PoolRepo.insert!(%Trans{num: 2}) + assert [^e] = PoolRepo.all(Trans) + assert [] = TestRepo.all(Trans) + raise UniqueError + end) + rescue + UniqueError -> :ok + end + + assert [] = TestRepo.all(Trans) + end + + test "transaction rolls back per repository" do + message = "cannot call rollback outside of transaction" + + assert_raise RuntimeError, message, fn -> + PoolRepo.rollback(:done) + end + + assert_raise RuntimeError, message, fn -> + TestRepo.transaction fn -> + PoolRepo.rollback(:done) + end + end + end + + test "transaction rolls back with reason on aborted transaction" do + e1 = PoolRepo.insert!(%Trans{num: 13}) + + assert_raise Ecto.ConstraintError, fn -> + TestRepo.transaction fn -> + PoolRepo.insert!(%Trans{id: e1.id, num: 14}) + end + end + end + + test "nested transaction partial rollback" do + assert PoolRepo.transaction(fn -> + e1 = PoolRepo.insert!(%Trans{num: 3}) + assert [^e1] = PoolRepo.all(Trans) + + try do + PoolRepo.transaction(fn -> + e2 = PoolRepo.insert!(%Trans{num: 4}) + assert [^e1, ^e2] = PoolRepo.all(from(t in Trans, order_by: t.num)) + raise UniqueError + end) + rescue + UniqueError -> :ok + end + + assert_raise DBConnection.ConnectionError, "transaction rolling back", + fn() -> PoolRepo.insert!(%Trans{num: 5}) end + end) == {:error, :rollback} + + assert TestRepo.all(Trans) == [] + end + + test "manual rollback doesn't bubble up" do + x = PoolRepo.transaction(fn -> + e = PoolRepo.insert!(%Trans{num: 6}) + assert [^e] = PoolRepo.all(Trans) + PoolRepo.rollback(:oops) + end) + + assert x == {:error, :oops} + assert [] = TestRepo.all(Trans) + end + + test "manual rollback bubbles up on nested transaction" do + assert PoolRepo.transaction(fn -> + e = PoolRepo.insert!(%Trans{num: 7}) + assert [^e] = PoolRepo.all(Trans) + assert {:error, :oops} = PoolRepo.transaction(fn -> + 
PoolRepo.rollback(:oops) + end) + assert_raise DBConnection.ConnectionError, "transaction rolling back", + fn() -> PoolRepo.insert!(%Trans{num: 8}) end + end) == {:error, :rollback} + + assert [] = TestRepo.all(Trans) + end + + test "transactions are not shared in repo" do + pid = self() + + new_pid = spawn_link fn -> + PoolRepo.transaction(fn -> + e = PoolRepo.insert!(%Trans{num: 9}) + assert [^e] = PoolRepo.all(Trans) + send(pid, :in_transaction) + receive do + :commit -> :ok + after + 5000 -> raise "timeout" + end + end) + send(pid, :committed) + end + + receive do + :in_transaction -> :ok + after + 5000 -> raise "timeout" + end + assert [] = PoolRepo.all(Trans) + + send(new_pid, :commit) + receive do + :committed -> :ok + after + 5000 -> raise "timeout" + end + + assert [%Trans{num: 9}] = PoolRepo.all(Trans) + end + + ## Checkout + + describe "with checkouts" do + test "transaction inside checkout" do + PoolRepo.checkout(fn -> + refute PoolRepo.in_transaction? + PoolRepo.transaction(fn -> + assert PoolRepo.in_transaction? + end) + refute PoolRepo.in_transaction? + end) + end + + test "checkout inside transaction" do + PoolRepo.transaction(fn -> + assert PoolRepo.in_transaction? + PoolRepo.checkout(fn -> + assert PoolRepo.in_transaction? + end) + assert PoolRepo.in_transaction? + end) + end + + test "checkout raises on transaction attempt" do + assert_raise DBConnection.ConnectionError, ~r"connection was checked out with status", fn -> + PoolRepo.checkout(fn -> PoolRepo.query!("BEGIN") end) + end + end + end + + ## Logging + + defp register_telemetry() do + Process.put(:telemetry, fn _, measurements, event -> send(self(), {measurements, event}) end) + end + + test "log begin, commit and rollback" do + register_telemetry() + + PoolRepo.transaction(fn -> + assert_received {measurements, %{params: [], result: {:ok, _res}}} + assert is_integer(measurements.query_time) and measurements.query_time >= 0 + assert is_integer(measurements.queue_time) and measurements.queue_time >= 0 + + refute_received %{} + register_telemetry() + end) + + assert_received {measurements, %{params: [], result: {:ok, _res}}} + assert is_integer(measurements.query_time) and measurements.query_time >= 0 + refute Map.has_key?(measurements, :queue_time) + + assert PoolRepo.transaction(fn -> + refute_received %{} + register_telemetry() + PoolRepo.rollback(:log_rollback) + end) == {:error, :log_rollback} + + assert_received {measurements, %{params: [], result: {:ok, _res}}} + assert is_integer(measurements.query_time) and measurements.query_time >= 0 + refute Map.has_key?(measurements, :queue_time) + end + + test "log queries inside transactions" do + PoolRepo.transaction(fn -> + register_telemetry() + assert [] = PoolRepo.all(Trans) + + assert_received {measurements, %{params: [], result: {:ok, _res}}} + assert is_integer(measurements.query_time) and measurements.query_time >= 0 + assert is_integer(measurements.decode_time) and measurements.query_time >= 0 + refute Map.has_key?(measurements, :queue_time) + end) + end +end diff --git a/integration/mssql/support/file_helpers.exs b/integration/mssql/ecto_sql/support/file_helpers.exs similarity index 83% rename from integration/mssql/support/file_helpers.exs rename to integration/mssql/ecto_sql/support/file_helpers.exs index b505174..947ff49 100644 --- a/integration/mssql/support/file_helpers.exs +++ b/integration/mssql/ecto_sql/support/file_helpers.exs @@ -13,13 +13,7 @@ defmodule Support.FileHelpers do tailored for this test case and test. 
""" defmacro in_tmp(fun) do - path = - Path.join([ - tmp_path(), - "#{__CALLER__.module}", - "#{elem(__CALLER__.function, 0)}" - ]) - + path = Path.join([tmp_path(), "#{__CALLER__.module}", "#{elem(__CALLER__.function, 0)}"]) quote do path = unquote(path) File.rm_rf!(path) @@ -44,6 +38,6 @@ defmodule Support.FileHelpers do end def assert_file(file, match) do - assert_file(file, &assert(&1 =~ match)) + assert_file file, &(assert &1 =~ match) end end diff --git a/integration/mssql/ecto_sql/support/migration.exs b/integration/mssql/ecto_sql/support/migration.exs new file mode 100644 index 0000000..1efd8a3 --- /dev/null +++ b/integration/mssql/ecto_sql/support/migration.exs @@ -0,0 +1,121 @@ +defmodule Ecto.Integration.Migration do + use Ecto.Migration + + def change do + create table(:users, comment: "users table") do + add :name, :string, comment: "name column" + add :custom_id, :uuid + timestamps() + end + + create table(:posts) do + add :title, :string, size: 100 + add :counter, :integer + add :text, :binary + add :bid, :binary_id + add :uuid, :uuid + add :meta, :map + add :links, {:map, :string} + add :intensities, {:map, :float} + add :public, :boolean + add :cost, :decimal, precision: 2, scale: 1 + add :visits, :integer + add :intensity, :float + add :author_id, :integer + add :posted, :date + timestamps(null: true) + end + + create table(:posts_users, primary_key: false) do + add :post_id, references(:posts) + add :user_id, references(:users) + end + + create table(:posts_users_pk) do + add :post_id, references(:posts) + add :user_id, references(:users) + timestamps() + end + + # Add a unique index on uuid. We use this + # to verify the behaviour that the index + # only matters if the UUID column is not NULL. + create unique_index(:posts, [:uuid], comment: "posts index") + + create table(:permalinks) do + add :uniform_resource_locator, :string + add :post_id, references(:posts) + add :user_id, references(:users) + end + + create unique_index(:permalinks, [:uniform_resource_locator]) + + create table(:comments) do + add :text, :string, size: 100 + add :lock_version, :integer, default: 1 + add :post_id, references(:posts) + add :author_id, references(:users) + end + + create table(:customs, primary_key: false) do + add :bid, :binary_id, primary_key: true + add :uuid, :uuid + end + + create unique_index(:customs, [:uuid]) + + create table(:customs_customs, primary_key: false) do + add :custom_id1, references(:customs, column: :bid, type: :binary_id) + add :custom_id2, references(:customs, column: :bid, type: :binary_id) + end + + create table(:barebones) do + add :num, :integer + end + + create table(:transactions) do + add :num, :integer + end + + create table(:lock_counters) do + add :count, :integer + end + + create table(:orders) do + add :instructions, :text + add :item, :map + add :permalink_id, references(:permalinks) + end + + unless :array_type in ExUnit.configuration[:exclude] do + create table(:tags) do + add :ints, {:array, :integer} + add :uuids, {:array, :uuid}, default: [] + add :items, {:array, :map} + end + end + + create table(:composite_pk, primary_key: false) do + add :a, :integer, primary_key: true + add :b, :integer, primary_key: true + add :name, :string + end + + create table(:corrupted_pk, primary_key: false) do + add :a, :string + end + + create table(:posts_users_composite_pk) do + add :post_id, references(:posts), primary_key: true + add :user_id, references(:users), primary_key: true + timestamps() + end + + create unique_index(:posts_users_composite_pk, 
[:post_id, :user_id]) + + create table(:usecs) do + add :naive_datetime_usec, :naive_datetime_usec + add :utc_datetime_usec, :utc_datetime_usec + end + end +end diff --git a/integration/mssql/ecto_sql/support/repo.exs b/integration/mssql/ecto_sql/support/repo.exs new file mode 100644 index 0000000..f17c838 --- /dev/null +++ b/integration/mssql/ecto_sql/support/repo.exs @@ -0,0 +1,23 @@ +defmodule Ecto.Integration.Repo do + defmacro __using__(opts) do + quote do + use Ecto.Repo, unquote(opts) + + @query_event __MODULE__ + |> Module.split() + |> Enum.map(& &1 |> Macro.underscore() |> String.to_atom()) + |> Kernel.++([:query]) + + def init(_, opts) do + fun = &Ecto.Integration.Repo.handle_event/4 + :telemetry.attach_many(__MODULE__, [[:custom], @query_event], fun, :ok) + {:ok, opts} + end + end + end + + def handle_event(event, latency, metadata, _config) do + handler = Process.delete(:telemetry) || fn _, _, _ -> :ok end + handler.(event, latency, metadata) + end +end diff --git a/integration/mssql/ecto_sql/support/schemas.exs b/integration/mssql/ecto_sql/support/schemas.exs new file mode 100644 index 0000000..6fd2f47 --- /dev/null +++ b/integration/mssql/ecto_sql/support/schemas.exs @@ -0,0 +1,332 @@ +defmodule Ecto.Integration.Schema do + defmacro __using__(_) do + quote do + use Ecto.Schema + type = + Application.get_env(:ecto, :primary_key_type) || + raise ":primary_key_type not set in :ecto application" + @primary_key {:id, type, autogenerate: true} + @foreign_key_type type + end + end +end + +defmodule PrefixedString do + use Ecto.Type + def type(), do: :string + def cast(string), do: {:ok, string} + def load(string), do: {:ok, "PREFIX-" <> string} + def dump("PREFIX-" <> string), do: {:ok, string} + def dump(_string), do: :error + def embed_as(_), do: :dump +end + +defmodule Ecto.Integration.Post do + @moduledoc """ + This module is used to test: + + * Overall functionality + * Overall types + * Non-null timestamps + * Relationships + * Dependent callbacks + + """ + use Ecto.Integration.Schema + import Ecto.Changeset + + schema "posts" do + field :counter, :id # Same as integer + field :title, :string + field :text, :binary + field :temp, :string, default: "temp", virtual: true + field :public, :boolean, default: true + field :cost, :decimal + field :visits, :integer + field :intensity, :float + field :bid, :binary_id + field :uuid, Ecto.Integration.TestRepo.uuid(), autogenerate: true + field :meta, :map + field :links, {:map, :string} + field :intensities, {:map, :float} + field :posted, :date + has_many :comments, Ecto.Integration.Comment, on_delete: :delete_all, on_replace: :delete + # The post<->permalink relationship should be marked as uniq + has_one :permalink, Ecto.Integration.Permalink, on_delete: :delete_all, on_replace: :delete + has_one :update_permalink, Ecto.Integration.Permalink, foreign_key: :post_id, on_delete: :delete_all, on_replace: :update + has_many :comments_authors, through: [:comments, :author] + belongs_to :author, Ecto.Integration.User + many_to_many :users, Ecto.Integration.User, + join_through: "posts_users", on_delete: :delete_all, on_replace: :delete + many_to_many :unique_users, Ecto.Integration.User, + join_through: "posts_users", unique: true + many_to_many :constraint_users, Ecto.Integration.User, + join_through: Ecto.Integration.PostUserCompositePk + has_many :users_comments, through: [:users, :comments] + has_many :comments_authors_permalinks, through: [:comments_authors, :permalink] + has_one :post_user_composite_pk, 
Ecto.Integration.PostUserCompositePk
+    timestamps()
+  end
+
+  def changeset(schema, params) do
+    cast(schema, params, ~w(counter title text temp public cost visits
+                            intensity bid uuid meta posted)a)
+  end
+end
+
+defmodule Ecto.Integration.Comment do
+  @moduledoc """
+  This module is used to test:
+
+    * Optimistic lock
+    * Relationships
+    * Dependent callbacks
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "comments" do
+    field :text, :string
+    field :lock_version, :integer, default: 1
+    belongs_to :post, Ecto.Integration.Post
+    belongs_to :author, Ecto.Integration.User
+    has_one :post_permalink, through: [:post, :permalink]
+  end
+
+  def changeset(schema, params) do
+    Ecto.Changeset.cast(schema, params, [:text])
+  end
+end
+
+defmodule Ecto.Integration.Permalink do
+  @moduledoc """
+  This module is used to test:
+
+    * Field sources
+    * Relationships
+    * Dependent callbacks
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "permalinks" do
+    field :url, :string, source: :uniform_resource_locator
+    belongs_to :post, Ecto.Integration.Post, on_replace: :nilify
+    belongs_to :update_post, Ecto.Integration.Post, on_replace: :update, foreign_key: :post_id, define_field: false
+    belongs_to :user, Ecto.Integration.User
+    has_many :post_comments_authors, through: [:post, :comments_authors]
+  end
+
+  def changeset(schema, params) do
+    Ecto.Changeset.cast(schema, params, [:url])
+  end
+end
+
+defmodule Ecto.Integration.PostUser do
+  @moduledoc """
+  This module is used to test:
+
+    * Many to many associations join_through with schema
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "posts_users_pk" do
+    belongs_to :user, Ecto.Integration.User
+    belongs_to :post, Ecto.Integration.Post
+    timestamps()
+  end
+end
+
+defmodule Ecto.Integration.User do
+  @moduledoc """
+  This module is used to test:
+
+    * UTC Timestamps
+    * Relationships
+    * Dependent callbacks
+
+  """
+  use Ecto.Integration.Schema
+
+  schema "users" do
+    field :name, :string
+    has_many :comments, Ecto.Integration.Comment, foreign_key: :author_id, on_delete: :nilify_all, on_replace: :nilify
+    has_one :permalink, Ecto.Integration.Permalink, on_replace: :nilify
+    has_many :posts, Ecto.Integration.Post, foreign_key: :author_id, on_delete: :nothing, on_replace: :delete
+    belongs_to :custom, Ecto.Integration.Custom, references: :bid, type: :binary_id
+    many_to_many :schema_posts, Ecto.Integration.Post, join_through: Ecto.Integration.PostUser
+    many_to_many :unique_posts, Ecto.Integration.Post, join_through: Ecto.Integration.PostUserCompositePk
+    timestamps(type: :utc_datetime)
+  end
+end
+
+defmodule Ecto.Integration.Custom do
+  @moduledoc """
+  This module is used to test:
+
+    * binary_id primary key
+    * Tying another schema to an existing schema
+
+  Due to the second item, it must be a subset of posts.
+ """ + use Ecto.Integration.Schema + + @primary_key {:bid, :binary_id, autogenerate: true} + schema "customs" do + field :uuid, Ecto.Integration.TestRepo.uuid() + many_to_many :customs, Ecto.Integration.Custom, + join_through: "customs_customs", join_keys: [custom_id1: :bid, custom_id2: :bid], + on_delete: :delete_all, on_replace: :delete + end +end + +defmodule Ecto.Integration.Barebone do + @moduledoc """ + This module is used to test: + + * A schema without primary keys + + """ + use Ecto.Integration.Schema + + @primary_key false + schema "barebones" do + field :num, :integer + end +end + +defmodule Ecto.Integration.Tag do + @moduledoc """ + This module is used to test: + + * The array type + * Embedding many schemas (uses array) + + """ + use Ecto.Integration.Schema + + schema "tags" do + field :ints, {:array, :integer} + field :uuids, {:array, Ecto.Integration.TestRepo.uuid()} + embeds_many :items, Ecto.Integration.Item + end +end + +defmodule Ecto.Integration.Item do + @moduledoc """ + This module is used to test: + + * Embedding + + """ + use Ecto.Schema + + embedded_schema do + field :reference, PrefixedString + field :price, :integer + field :valid_at, :date + + embeds_one :primary_color, Ecto.Integration.ItemColor + embeds_many :secondary_colors, Ecto.Integration.ItemColor + end +end + +defmodule Ecto.Integration.ItemColor do + @moduledoc """ + This module is used to test: + + * Nested embeds + + """ + use Ecto.Schema + + embedded_schema do + field :name, :string + end +end + +defmodule Ecto.Integration.Order do + @moduledoc """ + This module is used to test: + + * Text columns + * Embedding one schema + + """ + use Ecto.Integration.Schema + + schema "orders" do + field :instructions, :string + embeds_one :item, Ecto.Integration.Item + belongs_to :permalink, Ecto.Integration.Permalink + end +end + +defmodule Ecto.Integration.CompositePk do + @moduledoc """ + This module is used to test: + + * Composite primary keys + + """ + use Ecto.Integration.Schema + import Ecto.Changeset + + @primary_key false + schema "composite_pk" do + field :a, :integer, primary_key: true + field :b, :integer, primary_key: true + field :name, :string + end + def changeset(schema, params) do + cast(schema, params, ~w(a b name)a) + end +end + +defmodule Ecto.Integration.CorruptedPk do + @moduledoc """ + This module is used to test: + + * Primary keys that is not unique on a DB side + + """ + use Ecto.Integration.Schema + + @primary_key false + schema "corrupted_pk" do + field :a, :string, primary_key: true + end +end + +defmodule Ecto.Integration.PostUserCompositePk do + @moduledoc """ + This module is used to test: + + * Composite primary keys for 2 belongs_to fields + + """ + use Ecto.Integration.Schema + + @primary_key false + schema "posts_users_composite_pk" do + belongs_to :user, Ecto.Integration.User, primary_key: true + belongs_to :post, Ecto.Integration.Post, primary_key: true + timestamps() + end +end + +defmodule Ecto.Integration.Usec do + @moduledoc """ + This module is used to test: + + * usec datetime types + + """ + use Ecto.Integration.Schema + + schema "usecs" do + field :naive_datetime_usec, :naive_datetime_usec + field :utc_datetime_usec, :utc_datetime_usec + end +end diff --git a/integration/mssql/ecto_sql/support/types.exs b/integration/mssql/ecto_sql/support/types.exs new file mode 100644 index 0000000..f71ac15 --- /dev/null +++ b/integration/mssql/ecto_sql/support/types.exs @@ -0,0 +1,16 @@ +defmodule Custom.Permalink do + def type, do: :id + + def cast(string) when 
is_binary(string) do + case Integer.parse(string) do + {int, _} -> {:ok, int} + :error -> :error + end + end + + def cast(integer) when is_integer(integer), do: {:ok, integer} + def cast(_), do: :error + + def load(integer) when is_integer(integer), do: {:ok, integer} + def dump(integer) when is_integer(integer), do: {:ok, integer} +end diff --git a/integration/mssql/sql/alter.exs b/integration/mssql/sql/alter.exs deleted file mode 100644 index a35abef..0000000 --- a/integration/mssql/sql/alter.exs +++ /dev/null @@ -1,125 +0,0 @@ -defmodule Ecto.Integration.AlterTest do - use Ecto.Integration.Case, async: false - - alias Ecto.Integration.PoolRepo - - defmodule AlterMigrationOne do - use Ecto.Migration - - def up do - create table(:alter_col_type) do - add(:value, :integer) - end - - execute("INSERT INTO alter_col_type (value) VALUES (1)") - end - - def down do - drop(table(:alter_col_type)) - end - end - - defmodule AlterMigrationTwo do - use Ecto.Migration - - def up do - alter table(:alter_col_type) do - modify(:value, :real) - end - end - - def down do - alter table(:alter_col_type) do - modify(:value, :integer) - end - end - end - - import Ecto.Query, only: [from: 1, from: 2] - import Ecto.Migrator, only: [up: 4, down: 4] - - test "reset cache on returning query after alter column type" do - values = from(v in "alter_col_type", select: v.value) - - assert :ok == - up(PoolRepo, 20_161_112_120_000, AlterMigrationOne, log: false) - - assert PoolRepo.all(values) == [1] - - assert :ok == - up(PoolRepo, 20_161_112_130_000, AlterMigrationTwo, log: false) - - # optionally fail once with ArgumentError when preparing query prepared on - # another connection (and clear cache) - try do - PoolRepo.all(values) - rescue - err in [ArgumentError] -> - assert Exception.message(err) =~ "stale type" - assert [%Decimal{}] = PoolRepo.all(values) - else - result -> - assert [%Decimal{}] = result - end - - PoolRepo.transaction(fn -> - assert [%Decimal{}] = PoolRepo.all(values) - - assert :ok == - down(PoolRepo, 20_161_112_130_000, AlterMigrationTwo, log: false) - - # optionally fail once with database error when already prepared on - # connection (and clear cache) - try do - PoolRepo.all(values, mode: :savepoint) - catch - :error, _ -> - assert PoolRepo.all(values) == [1] - else - result -> - assert result == [1] - end - end) - after - assert :ok == - down(PoolRepo, 20_161_112_120_000, AlterMigrationOne, log: false) - end - - test "reset cache on paramterised query after alter column type" do - values = from(v in "alter_col_type") - - assert :ok == - up(PoolRepo, 20_161_112_120_000, AlterMigrationOne, log: false) - - assert PoolRepo.update_all(values, set: [value: 2]) == {1, nil} - - assert :ok == - up(PoolRepo, 20_161_112_130_000, AlterMigrationTwo, log: false) - - # optionally fail once with ArgumentError when preparing query prepared on - # another connection (and clear cache) - try do - PoolRepo.update_all(values, set: [value: 3]) - rescue - err in [ArgumentError] -> - assert Exception.message(err) =~ "stale type" - assert PoolRepo.update_all(values, set: [value: 4]) == {1, nil} - else - result -> - assert result == {1, nil} - end - - PoolRepo.transaction(fn -> - assert PoolRepo.update_all(values, set: [value: Decimal.new(5)]) == - {1, nil} - - assert :ok == - down(PoolRepo, 20_161_112_130_000, AlterMigrationTwo, log: false) - - assert PoolRepo.update_all(values, set: [value: 6]) == {1, nil} - end) - after - assert :ok == - down(PoolRepo, 20_161_112_120_000, AlterMigrationOne, log: false) - end -end diff 
--git a/integration/mssql/sql/migration.exs b/integration/mssql/sql/migration.exs deleted file mode 100644 index 3d8c766..0000000 --- a/integration/mssql/sql/migration.exs +++ /dev/null @@ -1,573 +0,0 @@ -defmodule Ecto.Integration.MigrationTest do - # Cannot be async as other tests may migrate - use ExUnit.Case - - alias Ecto.Integration.PoolRepo - - defmodule CreateMigration do - use Ecto.Migration - - @table table(:create_table_migration) - @index index(:create_table_migration, [:value], unique: true) - - def up do - create @table do - add(:value, :integer) - end - - create(@index) - end - - def down do - drop(@index) - drop(@table) - end - end - - defmodule AddColumnMigration do - use Ecto.Migration - - def up do - create table(:add_col_migration) do - add(:value, :integer) - end - - alter table(:add_col_migration) do - add(:to_be_added, :integer) - end - - execute( - "INSERT INTO add_col_migration (value, to_be_added) VALUES (1, 2)" - ) - end - - def down do - drop(table(:add_col_migration)) - end - end - - defmodule AlterColumnMigration do - use Ecto.Migration - - def up do - create table(:alter_col_migration) do - add(:from_null_to_not_null, :integer) - add(:from_not_null_to_null, :integer, null: false) - - add(:from_default_to_no_default, :integer, default: 0) - add(:from_no_default_to_default, :integer) - end - - alter table(:alter_col_migration) do - modify(:from_null_to_not_null, :string, null: false) - modify(:from_not_null_to_null, :string, null: true) - - modify(:from_default_to_no_default, :integer, default: nil) - modify(:from_no_default_to_default, :integer, default: 0) - end - - execute( - "INSERT INTO alter_col_migration (from_null_to_not_null) VALUES ('foo')" - ) - end - - def down do - drop(table(:alter_col_migration)) - end - end - - defmodule AlterForeignKeyOnDeleteMigration do - use Ecto.Migration - - def up do - create(table(:alter_fk_users)) - - create table(:alter_fk_posts) do - add(:alter_fk_user_id, :bigint) - end - - alter table(:alter_fk_posts) do - modify( - :alter_fk_user_id, - references(:alter_fk_users, on_delete: :nilify_all) - ) - end - - execute("INSERT INTO alter_fk_users DEFAULT VALUES") - execute("INSERT INTO alter_fk_posts (alter_fk_user_id) DEFAULT VALUES") - execute("DELETE FROM alter_fk_users") - end - - def down do - drop(table(:alter_fk_posts)) - drop(table(:alter_fk_users)) - end - end - - defmodule AlterForeignKeyOnUpdateMigration do - use Ecto.Migration - - def up do - create(table(:alter_fk_users)) - - create table(:alter_fk_posts) do - add(:alter_fk_user_id, :id) - end - - alter table(:alter_fk_posts) do - modify( - :alter_fk_user_id, - references(:alter_fk_users, on_update: :update_all) - ) - end - - execute("INSERT INTO alter_fk_users (id) VALUES ('1')") - - execute( - "INSERT INTO alter_fk_posts (id, alter_fk_user_id) VALUES ('1', '1')" - ) - - execute("UPDATE alter_fk_users SET id = '2'") - end - - def down do - drop(table(:alter_fk_posts)) - drop(table(:alter_fk_users)) - end - end - - defmodule DropColumnMigration do - use Ecto.Migration - - def up do - create table(:drop_col_migration) do - add(:value, :integer) - add(:to_be_removed, :integer) - end - - execute( - "INSERT INTO drop_col_migration (value, to_be_removed) VALUES (1, 2)" - ) - - alter table(:drop_col_migration) do - remove(:to_be_removed) - end - end - - def down do - drop(table(:drop_col_migration)) - end - end - - defmodule RenameColumnMigration do - use Ecto.Migration - - def up do - create table(:rename_col_migration) do - add(:to_be_renamed, :integer) - end - - 
rename(table(:rename_col_migration), :to_be_renamed, to: :was_renamed) - - execute("INSERT INTO rename_col_migration (was_renamed) VALUES (1)") - end - - def down do - drop(table(:rename_col_migration)) - end - end - - defmodule OnDeleteMigration do - use Ecto.Migration - - def up do - create(table(:parent1)) - create(table(:parent2)) - - create table(:ref_migration) do - add(:parent1, references(:parent1, on_delete: :nilify_all)) - end - - alter table(:ref_migration) do - add(:parent2, references(:parent2, on_delete: :delete_all)) - end - end - - def down do - drop(table(:ref_migration)) - drop(table(:parent1)) - drop(table(:parent2)) - end - end - - defmodule ReferencesRollbackMigration do - use Ecto.Migration - - def change do - create table(:parent) do - add(:name, :string) - end - - create table(:child) do - add(:parent_id, references(:parent)) - end - end - end - - defmodule RenameMigration do - use Ecto.Migration - - @table_current table(:posts_migration) - @table_new table(:new_posts_migration) - - def up do - create(@table_current) - rename(@table_current, to: @table_new) - end - - def down do - drop(@table_new) - end - end - - defmodule PrefixMigration do - use Ecto.Migration - - @prefix "ecto_prefix_test" - - def up do - execute(PoolRepo.create_prefix(@prefix)) - create(table(:first, prefix: @prefix)) - - create table(:second, prefix: @prefix) do - add(:first_id, references(:first)) - end - end - - def down do - drop(table(:second, prefix: @prefix)) - drop(table(:first, prefix: @prefix)) - execute(PoolRepo.drop_prefix(@prefix)) - end - end - - defmodule NoSQLMigration do - use Ecto.Migration - - def up do - create(table(:collection, options: [capped: true])) - execute(create: "collection") - end - end - - defmodule Parent do - use Ecto.Schema - - schema "parent" do - end - end - - defmodule NoErrorTableMigration do - use Ecto.Migration - - def change do - create_if_not_exists table(:existing) do - add(:name, :string) - end - - create_if_not_exists table(:existing) do - add(:name, :string) - end - - create_if_not_exists(table(:existing)) - - drop_if_exists(table(:existing)) - drop_if_exists(table(:existing)) - end - end - - defmodule NoErrorIndexMigration do - use Ecto.Migration - - def change do - create_if_not_exists(index(:posts, [:title])) - create_if_not_exists(index(:posts, [:title])) - drop_if_exists(index(:posts, [:title])) - drop_if_exists(index(:posts, [:title])) - end - end - - defmodule InferredDropIndexMigration do - use Ecto.Migration - - def change do - create(index(:posts, [:title])) - end - end - - defmodule AlterPrimaryKeyMigration do - use Ecto.Migration - - def change do - create table(:no_pk, primary_key: false) do - add(:dummy, :string) - end - - alter table(:no_pk) do - add(:id, :serial, primary_key: true) - end - end - end - - import Ecto.Query, only: [from: 2] - import Ecto.Migrator, only: [up: 4, down: 4] - - test "create and drop table and indexes" do - assert :ok == up(PoolRepo, 20_050_906_120_000, CreateMigration, log: false) - - assert :ok == - down(PoolRepo, 20_050_906_120_000, CreateMigration, log: false) - end - - test "correctly infers how to drop index" do - assert :ok == - up( - PoolRepo, - 20_050_906_120_000, - InferredDropIndexMigration, - log: false - ) - - assert :ok == - down( - PoolRepo, - 20_050_906_120_000, - InferredDropIndexMigration, - log: false - ) - end - - test "supports references" do - assert :ok == - up(PoolRepo, 20_050_906_120_000, OnDeleteMigration, log: false) - - parent1 = PoolRepo.insert!(Ecto.put_meta(%Parent{}, source: 
"parent1")) - parent2 = PoolRepo.insert!(Ecto.put_meta(%Parent{}, source: "parent2")) - - writer = - "INSERT INTO ref_migration (parent1, parent2) VALUES (#{parent1.id}, #{ - parent2.id - })" - - PoolRepo.query!(writer) - - reader = from(r in "ref_migration", select: {r.parent1, r.parent2}) - assert PoolRepo.all(reader) == [{"#{parent1.id}", "#{parent2.id}"}] - - PoolRepo.delete!(parent1) - assert PoolRepo.all(reader) == [{nil, "#{parent2.id}"}] - - PoolRepo.delete!(parent2) - assert PoolRepo.all(reader) == [] - - assert :ok == - down(PoolRepo, 20_050_906_120_000, OnDeleteMigration, log: false) - end - - test "rolls back references in change/1" do - assert :ok == - up( - PoolRepo, - 19_850_423_000_000, - ReferencesRollbackMigration, - log: false - ) - - assert :ok == - down( - PoolRepo, - 19_850_423_000_000, - ReferencesRollbackMigration, - log: false - ) - end - - test "create table if not exists and drop table if exists does not raise on failure" do - assert :ok == - up(PoolRepo, 19_850_423_000_001, NoErrorTableMigration, log: false) - end - - @tag :create_index_if_not_exists - test "create index if not exists and drop index if exists does not raise on failure" do - assert :ok == - up(PoolRepo, 19_850_423_000_002, NoErrorIndexMigration, log: false) - end - - test "raises on NoSQL migrations" do - assert_raise ArgumentError, - ~r"does not support keyword lists in :options", - fn -> - up(PoolRepo, 20_150_704_120_000, NoSQLMigration, log: false) - end - end - - @tag :add_column - test "add column" do - assert :ok == - up(PoolRepo, 20_070_906_120_000, AddColumnMigration, log: false) - - assert [2] == - PoolRepo.all(from(p in "add_col_migration", select: p.to_be_added)) - - :ok = down(PoolRepo, 20_070_906_120_000, AddColumnMigration, log: false) - end - - @tag :modify_column - test "modify column" do - assert :ok == - up(PoolRepo, 20_080_906_120_000, AlterColumnMigration, log: false) - - assert ["foo"] == - PoolRepo.all( - from(p in "alter_col_migration", select: p.from_null_to_not_null) - ) - - assert [nil] == - PoolRepo.all( - from(p in "alter_col_migration", select: p.from_not_null_to_null) - ) - - assert [nil] == - PoolRepo.all( - from( - p in "alter_col_migration", - select: p.from_default_to_no_default - ) - ) - - assert [0] == - PoolRepo.all( - from( - p in "alter_col_migration", - select: p.from_no_default_to_default - ) - ) - - query = - "INSERT INTO alter_col_migration (from_not_null_to_null) VALUES ('foo')" - - assert catch_error(PoolRepo.query!(query)) - - :ok = down(PoolRepo, 20_080_906_120_000, AlterColumnMigration, log: false) - end - - @tag :modify_foreign_key_on_delete - test "modify foreign key's on_delete constraint" do - assert :ok == - up( - PoolRepo, - 20_130_802_170_000, - AlterForeignKeyOnDeleteMigration, - log: false - ) - - assert [nil] == - PoolRepo.all( - from(p in "alter_fk_posts", select: p.alter_fk_user_id) - ) - - :ok = - down( - PoolRepo, - 20_130_802_170_000, - AlterForeignKeyOnDeleteMigration, - log: false - ) - end - - @tag :modify_foreign_key_on_update - test "modify foreign key's on_update constraint" do - assert :ok == - up( - PoolRepo, - 20_130_802_170_000, - AlterForeignKeyOnUpdateMigration, - log: false - ) - - assert [2] == - PoolRepo.all( - from(p in "alter_fk_posts", select: p.alter_fk_user_id) - ) - - :ok = - down( - PoolRepo, - 20_130_802_170_000, - AlterForeignKeyOnUpdateMigration, - log: false - ) - end - - @tag :remove_column - test "remove column" do - assert :ok == - up(PoolRepo, 20_090_906_120_000, DropColumnMigration, log: false) - - 
assert catch_error( - PoolRepo.all( - from(p in "drop_col_migration", select: p.to_be_removed) - ) - ) - - :ok = down(PoolRepo, 20_090_906_120_000, DropColumnMigration, log: false) - end - - @tag :rename_column - test "rename column" do - assert :ok == - up(PoolRepo, 20_150_718_120_000, RenameColumnMigration, log: false) - - assert [1] == - PoolRepo.all( - from(p in "rename_col_migration", select: p.was_renamed) - ) - - :ok = down(PoolRepo, 20_150_718_120_000, RenameColumnMigration, log: false) - end - - @tag :rename_table - test "rename table" do - assert :ok == up(PoolRepo, 20_150_712_120_000, RenameMigration, log: false) - - assert :ok == - down(PoolRepo, 20_150_712_120_000, RenameMigration, log: false) - end - - @tag :prefix - test "prefix" do - assert :ok == up(PoolRepo, 20_151_012_120_000, PrefixMigration, log: false) - - assert :ok == - down(PoolRepo, 20_151_012_120_000, PrefixMigration, log: false) - end - - @tag :alter_primary_key - test "alter primary key" do - assert :ok == - up( - PoolRepo, - 20_151_012_120_000, - AlterPrimaryKeyMigration, - log: false - ) - - assert :ok == - down( - PoolRepo, - 20_151_012_120_000, - AlterPrimaryKeyMigration, - log: false - ) - end -end diff --git a/integration/mssql/sql/sandbox.exs b/integration/mssql/sql/sandbox.exs deleted file mode 100644 index f0af416..0000000 --- a/integration/mssql/sql/sandbox.exs +++ /dev/null @@ -1,156 +0,0 @@ -defmodule Ecto.Integration.SandboxTest do - use ExUnit.Case - - alias Ecto.Adapters.SQL.Sandbox - alias Ecto.Integration.TestRepo - alias Ecto.Integration.Post - - import ExUnit.CaptureLog - - test "include link to SQL sandbox on ownership errors" do - assert_raise DBConnection.OwnershipError, - ~r"See Ecto.Adapters.SQL.Sandbox docs for more information.", - fn -> - TestRepo.all(Post) - end - end - - test "can use the repository when checked out" do - assert_raise DBConnection.OwnershipError, - ~r"cannot find ownership process", - fn -> - TestRepo.all(Post) - end - - Sandbox.checkout(TestRepo) - assert TestRepo.all(Post) == [] - Sandbox.checkin(TestRepo) - - assert_raise DBConnection.OwnershipError, - ~r"cannot find ownership process", - fn -> - TestRepo.all(Post) - end - end - - test "can use the repository when allowed from another process" do - assert_raise DBConnection.OwnershipError, - ~r"cannot find ownership process", - fn -> - TestRepo.all(Post) - end - - parent = self() - - Task.start_link(fn -> - Sandbox.checkout(TestRepo) - Sandbox.allow(TestRepo, self(), parent) - send(parent, :allowed) - :timer.sleep(:infinity) - end) - - assert_receive :allowed - assert TestRepo.all(Post) == [] - end - - test "can use the repository when shared from another process" do - Sandbox.checkout(TestRepo) - Sandbox.mode(TestRepo, {:shared, self()}) - assert Task.async(fn -> TestRepo.all(Post) end) |> Task.await() == [] - after - Sandbox.mode(TestRepo, :manual) - end - - test "runs inside a sandbox that is rolled back on checkin" do - Sandbox.checkout(TestRepo) - assert TestRepo.insert(%Post{}) - assert TestRepo.all(Post) != [] - Sandbox.checkin(TestRepo) - Sandbox.checkout(TestRepo) - assert TestRepo.all(Post) == [] - Sandbox.checkin(TestRepo) - end - - test "runs inside a sandbox that may be disabled" do - Sandbox.checkout(TestRepo, sandbox: false) - assert TestRepo.insert(%Post{}) - assert TestRepo.all(Post) != [] - Sandbox.checkin(TestRepo) - - Sandbox.checkout(TestRepo) - assert {1, _} = TestRepo.delete_all(Post) - Sandbox.checkin(TestRepo) - - Sandbox.checkout(TestRepo, sandbox: false) - assert {1, _} = 
TestRepo.delete_all(Post) - Sandbox.checkin(TestRepo) - end - - @tag :transaction_isolation - test "runs inside a sandbox with custom isolation level" do - Sandbox.checkout(TestRepo, isolation: "READ UNCOMMITTED") - - # Setting it to the same level later on works - TestRepo.query!("SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED") - - # Even inside a transaction - TestRepo.transaction(fn -> - TestRepo.query!("SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED") - end) - end - - test "disconnects sandbox on transaction timeouts" do - Sandbox.checkout(TestRepo) - - assert capture_log(fn -> - TestRepo.transaction( - fn -> - :timer.sleep(1001) - end, - timeout: 1 - ) - end) =~ "timed out" - - Sandbox.checkin(TestRepo) - end - - test "runs inside a sandbox even with failed queries" do - Sandbox.checkout(TestRepo) - - {:ok, _} = TestRepo.insert(%Post{}, skip_transaction: true) - # This is a failed query but it should not taint the sandbox transaction - {:error, _} = TestRepo.query("INVALID") - {:ok, _} = TestRepo.insert(%Post{}, skip_transaction: true) - - Sandbox.checkin(TestRepo) - end - - test "works when preloading associations from another process" do - Sandbox.checkout(TestRepo) - assert TestRepo.insert(%Post{}) - parent = self() - - Task.start_link(fn -> - Sandbox.allow(TestRepo, parent, self()) - assert [_] = TestRepo.all(Post) |> TestRepo.preload([:author, :comments]) - send(parent, :success) - end) - - assert_receive :success - end - - test "allows an ownership timeout to be passed for an individual `checkout` call" do - log = - capture_log(fn -> - :ok = Sandbox.checkout(TestRepo, ownership_timeout: 20) - - Process.sleep(1000) - - assert_raise DBConnection.OwnershipError, fn -> - TestRepo.all(Post) - end - end) - - assert log =~ ~r/timed out.*20ms/ - end -end diff --git a/integration/mssql/sql/sql.exs b/integration/mssql/sql/sql.exs deleted file mode 100644 index 7cdca6a..0000000 --- a/integration/mssql/sql/sql.exs +++ /dev/null @@ -1,127 +0,0 @@ -defmodule Ecto.Integration.SQLTest do - use Ecto.Integration.Case, async: true - - alias Ecto.Integration.TestRepo - alias Ecto.Integration.Barebone - alias Ecto.Integration.Post - import Ecto.Query, only: [from: 2] - - test "fragmented types" do - datetime = ~N[2014-01-16 20:26:51.000000] - TestRepo.insert!(%Post{inserted_at: datetime}) - - query = - from( - p in Post, - where: fragment("? 
>= ?", p.inserted_at, ^datetime), - select: p.inserted_at - ) - - assert [^datetime] = TestRepo.all(query) - end - - test "fragmented schemaless types" do - TestRepo.insert!(%Post{visits: 123}) - - assert [123] = - TestRepo.all( - from(p in "posts", select: type(fragment("visits"), :integer)) - ) - end - - @tag :array_type - test "fragment array types" do - datetime1 = ~N[2014-01-16 00:00:00.0] - datetime2 = ~N[2014-02-16 00:00:00.0] - result = TestRepo.query!("SELECT $1::timestamp[]", [[datetime1, datetime2]]) - assert [[[{{2014, 1, 16}, _}, {{2014, 2, 16}, _}]]] = result.rows - end - - test "query!/4" do - result = TestRepo.query!("SELECT 1") - assert result.rows == [[1]] - end - - test "query!/4 with iodata" do - result = TestRepo.query!(["SELECT", ?\s, ?1]) - assert result.rows == [[1]] - end - - test "to_sql/3" do - {sql, []} = TestRepo.to_sql(:all, Barebone) - assert sql =~ "SELECT" - assert sql =~ "barebones" - - {sql, [0]} = - TestRepo.to_sql( - :update_all, - from(b in Barebone, update: [set: [num: ^0]]) - ) - - assert sql =~ "UPDATE" - assert sql =~ "barebones" - assert sql =~ "SET" - - {sql, []} = TestRepo.to_sql(:delete_all, Barebone) - assert sql =~ "DELETE" - assert sql =~ "barebones" - end - - test "Repo.insert! escape" do - TestRepo.insert!(%Post{title: "'"}) - - query = from(p in Post, select: p.title) - assert ["'"] == TestRepo.all(query) - end - - test "Repo.update! escape" do - p = TestRepo.insert!(%Post{title: "hello"}) - TestRepo.update!(Ecto.Changeset.change(p, title: "'")) - - query = from(p in Post, select: p.title) - assert ["'"] == TestRepo.all(query) - end - - test "Repo.insert_all escape" do - TestRepo.insert_all(Post, [%{title: "'"}]) - - query = from(p in Post, select: p.title) - assert ["'"] == TestRepo.all(query) - end - - test "Repo.update_all escape" do - TestRepo.insert!(%Post{title: "hello"}) - - TestRepo.update_all(Post, set: [title: "'"]) - reader = from(p in Post, select: p.title) - assert ["'"] == TestRepo.all(reader) - - query = from(Post, where: "'" != "") - TestRepo.update_all(query, set: [title: "''"]) - assert ["''"] == TestRepo.all(reader) - end - - test "Repo.delete_all escape" do - TestRepo.insert!(%Post{title: "hello"}) - assert [_] = TestRepo.all(Post) - - TestRepo.delete_all(from(Post, where: "'" == "'")) - assert [] == TestRepo.all(Post) - end - - test "load" do - inserted_at = ~N[2016-01-01 09:00:00.000000] - - TestRepo.insert!(%Post{ - title: "title1", - inserted_at: inserted_at, - public: false - }) - - result = Ecto.Adapters.SQL.query!(TestRepo, "SELECT * FROM posts", []) - posts = Enum.map(result.rows, &TestRepo.load(Post, {result.columns, &1})) - - assert [%Post{title: "title1", inserted_at: ^inserted_at, public: false}] = - posts - end -end diff --git a/integration/mssql/sql/stream.exs b/integration/mssql/sql/stream.exs deleted file mode 100644 index a7e2ad2..0000000 --- a/integration/mssql/sql/stream.exs +++ /dev/null @@ -1,82 +0,0 @@ -defmodule Ecto.Integration.StreamTest do - use Ecto.Integration.Case, async: true - - alias Ecto.Integration.TestRepo - alias Ecto.Integration.Post - alias Ecto.Integration.Comment - import Ecto.Query - - test "stream empty" do - assert {:ok, []} = - TestRepo.transaction(fn -> - TestRepo.stream(Post) - |> Enum.to_list() - end) - - assert {:ok, []} = - TestRepo.transaction(fn -> - TestRepo.stream(from(p in Post)) - |> Enum.to_list() - end) - end - - test "stream without schema" do - %Post{} = TestRepo.insert!(%Post{title: "title1"}) - %Post{} = TestRepo.insert!(%Post{title: "title2"}) - - assert 
{:ok, ["title1", "title2"]} = - TestRepo.transaction(fn -> - TestRepo.stream( - from(p in "posts", order_by: p.title, select: p.title) - ) - |> Enum.to_list() - end) - end - - test "stream with assoc" do - p1 = TestRepo.insert!(%Post{title: "1"}) - - %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: p1.id}) - %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id}) - - stream = TestRepo.stream(Ecto.assoc(p1, :comments)) - - assert {:ok, [c1, c2]} = - TestRepo.transaction(fn -> - Enum.to_list(stream) - end) - - assert c1.id == cid1 - assert c2.id == cid2 - end - - test "stream with preload" do - p1 = TestRepo.insert!(%Post{title: "1"}) - p2 = TestRepo.insert!(%Post{title: "2"}) - TestRepo.insert!(%Post{title: "3"}) - - %Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: p1.id}) - %Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id}) - %Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: p2.id}) - %Comment{id: cid4} = TestRepo.insert!(%Comment{text: "4", post_id: p2.id}) - - assert {:ok, [p1, p2, p3]} = - TestRepo.transaction(fn -> - from(p in Post, preload: [:comments], select: p) - |> TestRepo.stream(max_rows: 2) - |> sort_by_id() - end) - - assert [%Comment{id: ^cid1}, %Comment{id: ^cid2}] = - p1.comments |> sort_by_id - - assert [%Comment{id: ^cid3}, %Comment{id: ^cid4}] = - p2.comments |> sort_by_id - - assert [] = p3.comments - end - - defp sort_by_id(values) do - Enum.sort_by(values, & &1.id) - end -end diff --git a/integration/mssql/sql/subquery.exs b/integration/mssql/sql/subquery.exs deleted file mode 100644 index e883cb6..0000000 --- a/integration/mssql/sql/subquery.exs +++ /dev/null @@ -1,154 +0,0 @@ -Code.require_file("../support/types.exs", __DIR__) - -defmodule Ecto.Integration.SubQueryTest do - use Ecto.Integration.Case, async: true - - alias Ecto.Integration.TestRepo - import Ecto.Query - alias Ecto.Integration.Post - alias Ecto.Integration.Comment - - test "from: subqueries with select source" do - TestRepo.insert!(%Post{text: "hello", public: true}) - - query = from(p in Post, select: p) - assert ["hello"] = TestRepo.all(from(p in subquery(query), select: p.text)) - assert [post] = TestRepo.all(from(p in subquery(query), select: p)) - - assert %NaiveDateTime{} = post.inserted_at - assert post.__meta__.state == :loaded - end - - test "from: subqueries with map and select expression" do - TestRepo.insert!(%Post{text: "hello", public: true}) - - query = from(p in Post, select: %{text: p.text, pub: not p.public}) - assert ["hello"] = TestRepo.all(from(p in subquery(query), select: p.text)) - - assert [%{text: "hello", pub: false}] = - TestRepo.all(from(p in subquery(query), select: p)) - - assert [{"hello", %{text: "hello", pub: false}}] = - TestRepo.all(from(p in subquery(query), select: {p.text, p})) - - assert [{%{text: "hello", pub: false}, false}] = - TestRepo.all(from(p in subquery(query), select: {p, p.pub})) - end - - test "from: subqueries with map update and select expression" do - TestRepo.insert!(%Post{text: "hello", public: true}) - - query = from(p in Post, select: %{p | public: not p.public}) - assert ["hello"] = TestRepo.all(from(p in subquery(query), select: p.text)) - - assert [%Post{text: "hello", public: false}] = - TestRepo.all(from(p in subquery(query), select: p)) - - assert [{"hello", %Post{text: "hello", public: false}}] = - TestRepo.all(from(p in subquery(query), select: {p.text, p})) - - assert [{%Post{text: "hello", public: false}, false}] = - TestRepo.all(from(p in 
subquery(query), select: {p, p.public})) - end - - test "from: subqueries with map update on virtual field and select expression" do - TestRepo.insert!(%Post{text: "hello"}) - - query = from(p in Post, select: %{p | temp: p.text}) - assert ["hello"] = TestRepo.all(from(p in subquery(query), select: p.temp)) - - assert [%Post{text: "hello", temp: "hello"}] = - TestRepo.all(from(p in subquery(query), select: p)) - end - - test "from: subqueries with aggregates" do - TestRepo.insert!(%Post{visits: 10}) - TestRepo.insert!(%Post{visits: 11}) - TestRepo.insert!(%Post{visits: 13}) - - query = from(p in Post, select: [:visits]) - - assert [13] = - TestRepo.all(from(p in subquery(query), select: max(p.visits))) - - query = - from(p in Post, select: [:visits], order_by: [asc: :visits], limit: 2) - - assert [11] = - TestRepo.all(from(p in subquery(query), select: max(p.visits))) - - query = from(p in Post) - - assert [13] = - TestRepo.all(from(p in subquery(query), select: max(p.visits))) - - query = from(p in Post, order_by: [asc: :visits], limit: 2) - - assert [11] = - TestRepo.all(from(p in subquery(query), select: max(p.visits))) - end - - test "from: subqueries with parameters" do - TestRepo.insert!(%Post{visits: 10, text: "hello"}) - TestRepo.insert!(%Post{visits: 11, text: "hello"}) - TestRepo.insert!(%Post{visits: 13, text: "world"}) - - query = from(p in Post, where: p.visits >= ^11 and p.visits <= ^13) - - query = - from( - p in subquery(query), - where: p.text == ^"hello", - select: fragment("? + ?", p.visits, ^1) - ) - - assert [12] = TestRepo.all(query) - end - - test "join: subqueries with select source" do - %{id: id} = TestRepo.insert!(%Post{text: "hello", public: true}) - TestRepo.insert!(%Comment{post_id: id}) - - query = from(p in Post, select: p) - - assert ["hello"] = - TestRepo.all( - from( - c in Comment, - join: p in subquery(query), - on: c.post_id == p.id, - select: p.text - ) - ) - - assert [%Post{inserted_at: %NaiveDateTime{}}] = - TestRepo.all( - from( - c in Comment, - join: p in subquery(query), - on: c.post_id == p.id, - select: p - ) - ) - end - - test "join: subqueries with parameters" do - TestRepo.insert!(%Post{visits: 10, text: "hello"}) - TestRepo.insert!(%Post{visits: 11, text: "hello"}) - TestRepo.insert!(%Post{visits: 13, text: "world"}) - TestRepo.insert!(%Comment{}) - TestRepo.insert!(%Comment{}) - - query = from(p in Post, where: p.visits >= ^11 and p.visits <= ^13) - - query = - from( - c in Comment, - join: p in subquery(query), - where: p.text == ^"hello", - select: fragment("? 
+ ?", p.visits, ^1) - ) - - assert [12, 12] = TestRepo.all(query) - end -end diff --git a/integration/mssql/sql/transaction.exs b/integration/mssql/sql/transaction.exs deleted file mode 100644 index 6aa3a07..0000000 --- a/integration/mssql/sql/transaction.exs +++ /dev/null @@ -1,290 +0,0 @@ -defmodule Ecto.Integration.TransactionTest do - # We can keep this test async as long as it - # is the only one accessing the transactions table - use Ecto.Integration.Case, async: true - - import Ecto.Query - # Used for writes - alias Ecto.Integration.PoolRepo - # Used for reads - alias Ecto.Integration.TestRepo - - @moduletag :capture_log - - defmodule UniqueError do - defexception message: "unique error" - end - - setup do - PoolRepo.delete_all("transactions") - :ok - end - - defmodule Trans do - use Ecto.Schema - - schema "transactions" do - field(:text, :string) - end - end - - test "transaction returns value" do - refute PoolRepo.in_transaction?() - - {:ok, val} = - PoolRepo.transaction(fn -> - assert PoolRepo.in_transaction?() - - {:ok, val} = - PoolRepo.transaction(fn -> - assert PoolRepo.in_transaction?() - 42 - end) - - assert PoolRepo.in_transaction?() - val - end) - - refute PoolRepo.in_transaction?() - assert val == 42 - end - - test "transaction re-raises" do - assert_raise UniqueError, fn -> - PoolRepo.transaction(fn -> - PoolRepo.transaction(fn -> - raise UniqueError - end) - end) - end - end - - test "transaction commits" do - PoolRepo.transaction(fn -> - e = PoolRepo.insert!(%Trans{text: "1"}) - assert [^e] = PoolRepo.all(Trans) - assert [] = TestRepo.all(Trans) - end) - - assert [%Trans{text: "1"}] = PoolRepo.all(Trans) - end - - test "transaction rolls back" do - try do - PoolRepo.transaction(fn -> - e = PoolRepo.insert!(%Trans{text: "2"}) - assert [^e] = PoolRepo.all(Trans) - assert [] = TestRepo.all(Trans) - raise UniqueError - end) - rescue - UniqueError -> :ok - end - - assert [] = TestRepo.all(Trans) - end - - test "transaction rolls back per repository" do - message = "cannot call rollback outside of transaction" - - assert_raise RuntimeError, message, fn -> - PoolRepo.rollback(:done) - end - - assert_raise RuntimeError, message, fn -> - TestRepo.transaction(fn -> - PoolRepo.rollback(:done) - end) - end - end - - test "nested transaction partial rollback" do - assert PoolRepo.transaction(fn -> - e1 = PoolRepo.insert!(%Trans{text: "3"}) - assert [^e1] = PoolRepo.all(Trans) - - try do - PoolRepo.transaction(fn -> - e2 = PoolRepo.insert!(%Trans{text: "4"}) - - assert [^e1, ^e2] = - PoolRepo.all(from(t in Trans, order_by: t.text)) - - raise UniqueError - end) - rescue - UniqueError -> :ok - end - - assert_raise DBConnection.ConnectionError, - "transaction rolling back", - fn -> PoolRepo.insert!(%Trans{text: "5"}) end - end) == {:error, :rollback} - - assert TestRepo.all(Trans) == [] - end - - test "manual rollback doesn't bubble up" do - x = - PoolRepo.transaction(fn -> - e = PoolRepo.insert!(%Trans{text: "6"}) - assert [^e] = PoolRepo.all(Trans) - PoolRepo.rollback(:oops) - end) - - assert x == {:error, :oops} - assert [] = TestRepo.all(Trans) - end - - test "manual rollback bubbles up on nested transaction" do - assert PoolRepo.transaction(fn -> - e = PoolRepo.insert!(%Trans{text: "6"}) - assert [^e] = PoolRepo.all(Trans) - - assert {:error, :oops} = - PoolRepo.transaction(fn -> - PoolRepo.rollback(:oops) - end) - - assert_raise DBConnection.ConnectionError, - "transaction rolling back", - fn -> PoolRepo.insert!(%Trans{text: "5"}) end - end) == {:error, :rollback} - - assert [] = 
TestRepo.all(Trans) - end - - test "transactions are not shared in repo" do - pid = self() - - new_pid = - spawn_link(fn -> - PoolRepo.transaction(fn -> - e = PoolRepo.insert!(%Trans{text: "7"}) - assert [^e] = PoolRepo.all(Trans) - send(pid, :in_transaction) - - receive do - :commit -> :ok - after - 5000 -> raise "timeout" - end - end) - - send(pid, :committed) - end) - - receive do - :in_transaction -> :ok - after - 5000 -> raise "timeout" - end - - assert [] = PoolRepo.all(Trans) - - send(new_pid, :commit) - - receive do - :committed -> :ok - after - 5000 -> raise "timeout" - end - - assert [%Trans{text: "7"}] = PoolRepo.all(Trans) - end - - ## Logging - - test "log begin, commit and rollback" do - Process.put(:on_log, &send(self(), &1)) - - PoolRepo.transaction(fn -> - assert_received %Ecto.LogEntry{params: [], result: {:ok, _}} = entry - assert is_integer(entry.query_time) and entry.query_time >= 0 - assert is_integer(entry.queue_time) and entry.queue_time >= 0 - - refute_received %Ecto.LogEntry{} - Process.put(:on_log, &send(self(), &1)) - end) - - assert_received %Ecto.LogEntry{params: [], result: {:ok, _}} = entry - assert is_integer(entry.query_time) and entry.query_time >= 0 - assert is_nil(entry.queue_time) - - assert PoolRepo.transaction(fn -> - refute_received %Ecto.LogEntry{} - Process.put(:on_log, &send(self(), &1)) - PoolRepo.rollback(:log_rollback) - end) == {:error, :log_rollback} - - assert_received %Ecto.LogEntry{params: [], result: {:ok, _}} = entry - assert is_integer(entry.query_time) and entry.query_time >= 0 - assert is_nil(entry.queue_time) - end - - test "log queries inside transactions" do - PoolRepo.transaction(fn -> - Process.put(:on_log, &send(self(), &1)) - assert [] = PoolRepo.all(Trans) - - assert_received %Ecto.LogEntry{params: [], result: {:ok, _}} = entry - assert is_integer(entry.query_time) and entry.query_time >= 0 - assert is_integer(entry.decode_time) and entry.query_time >= 0 - assert is_nil(entry.queue_time) - end) - end - - @tag :strict_savepoint - test "log raises after begin, drops transaction" do - try do - Process.put(:on_log, fn _ -> raise UniqueError end) - PoolRepo.transaction(fn -> :ok end) - rescue - UniqueError -> :ok - end - - # If it doesn't fail, the transaction was not closed properly. 
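# (A savepoint can only be created inside an open transaction, so the
# catch_error below passes only if the raising log callback really did
# drop the transaction; if the BEGIN had leaked, the savepoint would
# succeed and catch_error would flunk.)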
- catch_error(PoolRepo.query!("savepoint foobar")) - end - - test "log raises after begin, drops the whole transaction" do - try do - PoolRepo.transaction(fn -> - PoolRepo.insert!(%Trans{text: "8"}) - Process.put(:on_log, fn _ -> raise UniqueError end) - PoolRepo.transaction(fn -> flunk("log did not raise") end) - end) - rescue - UniqueError -> :ok - end - - assert [] = PoolRepo.all(Trans) - end - - test "log raises after commit, does commit" do - try do - PoolRepo.transaction(fn -> - PoolRepo.insert!(%Trans{text: "10"}) - Process.put(:on_log, fn _ -> raise UniqueError end) - end) - rescue - UniqueError -> :ok - end - - assert [%Trans{text: "10"}] = PoolRepo.all(Trans) - end - - test "log raises after rollback, does rollback" do - try do - PoolRepo.transaction(fn -> - PoolRepo.insert!(%Trans{text: "11"}) - Process.put(:on_log, fn _ -> raise UniqueError end) - PoolRepo.rollback(:rollback) - end) - rescue - UniqueError -> :ok - end - - assert [] = PoolRepo.all(Trans) - end -end diff --git a/integration/mssql/support/migration.exs b/integration/mssql/support/migration.exs deleted file mode 100644 index c6d2cfe..0000000 --- a/integration/mssql/support/migration.exs +++ /dev/null @@ -1,108 +0,0 @@ -defmodule Ecto.Integration.Migration do - use Ecto.Migration - - def change do - create table(:users, comment: "users table") do - add(:name, :string, comment: "name column") - add(:custom_id, :uuid) - timestamps() - end - - create table(:posts) do - add(:title, :string, size: 100) - add(:counter, :integer) - add(:text, :binary) - add(:bid, :binary_id) - add(:uuid, :uuid) - add(:meta, :map) - add(:links, {:map, :string}) - add(:public, :boolean) - add(:cost, :decimal, precision: 2, scale: 1) - add(:visits, :integer) - add(:intensity, :float) - add(:author_id, :integer) - add(:posted, :date) - timestamps(null: true) - end - - create table(:posts_users, primary_key: false) do - add(:post_id, references(:posts)) - add(:user_id, references(:users)) - end - - create table(:posts_users_pk) do - add(:post_id, references(:posts)) - add(:user_id, references(:users)) - timestamps() - end - - # Add a unique index on uuid. We use this - # to verify the behaviour that the index - # only matters if the UUID column is not NULL. 
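# (SQL Server, unlike postgres, treats NULLs as equal in a unique index
# and so permits only one NULL row, which is why the :customs index
# further down in this file is filtered with where: "uuid IS NOT NULL".)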
- create(unique_index(:posts, [:uuid], comment: "posts index")) - - create table(:permalinks) do - add(:url, :string) - add(:post_id, references(:posts)) - add(:user_id, references(:users)) - end - - create table(:comments) do - add(:text, :string, size: 100) - add(:lock_version, :integer, default: 1) - add(:post_id, references(:posts)) - add(:author_id, references(:users)) - end - - create table(:customs, primary_key: false) do - add(:bid, :binary_id, primary_key: true) - add(:uuid, :uuid) - end - - create(unique_index(:customs, [:uuid], where: "uuid IS NOT NULL")) - - create table(:customs_customs, primary_key: false) do - add(:custom_id1, references(:customs, column: :bid, type: :binary_id)) - add(:custom_id2, references(:customs, column: :bid, type: :binary_id)) - end - - create table(:barebones) do - add(:num, :integer) - end - - create table(:transactions) do - add(:text, :text) - end - - create table(:lock_counters) do - add(:count, :integer) - end - - create table(:orders) do - add(:item, :map) - add(:comment_id, references(:comments)) - end - - unless :array_type in ExUnit.configuration()[:exclude] do - create table(:tags) do - add(:ints, {:array, :integer}) - add(:uuids, {:array, :uuid}, default: []) - add(:items, {:array, :map}) - end - end - - create table(:composite_pk, primary_key: false) do - add(:a, :integer, primary_key: true) - add(:b, :integer, primary_key: true) - add(:name, :string) - end - - create table(:posts_users_composite_pk) do - add(:post_id, references(:posts), primary_key: true) - add(:user_id, references(:users), primary_key: true) - timestamps() - end - - create(unique_index(:posts_users_composite_pk, [:post_id, :user_id])) - end -end diff --git a/integration/mssql/support/repo.exs b/integration/mssql/support/repo.exs deleted file mode 100644 index 1dc7720..0000000 --- a/integration/mssql/support/repo.exs +++ /dev/null @@ -1,22 +0,0 @@ -defmodule Ecto.Integration.Repo do - defmacro __using__(opts) do - quote do - config = Application.get_env(:ecto, __MODULE__) - - config = - Keyword.put(config, :loggers, [ - Ecto.LogEntry, - {Ecto.Integration.Repo, :log, [:on_log]} - ]) - - Application.put_env(:ecto, __MODULE__, config) - use Ecto.Repo, unquote(opts) - end - end - - def log(entry, key) do - on_log = Process.delete(key) || fn _ -> :ok end - on_log.(entry) - entry - end -end diff --git a/integration/mssql/support/schemas.exs b/integration/mssql/support/schemas.exs deleted file mode 100644 index b7b8b00..0000000 --- a/integration/mssql/support/schemas.exs +++ /dev/null @@ -1,385 +0,0 @@ -defmodule Ecto.Integration.Schema do - defmacro __using__(_) do - quote do - use Ecto.Schema - - type = - Application.get_env(:ecto, :primary_key_type) || - raise ":primary_key_type not set in :ecto application" - - @primary_key {:id, type, autogenerate: true} - @foreign_key_type type - @timestamps_opts [usec: false] - end - end -end - -defmodule Ecto.Integration.Post do - @moduledoc """ - This module is used to test: - - * Overall functionality - * Overall types - * Non-null timestamps - * Relationships - * Dependent callbacks - - """ - use Ecto.Integration.Schema - import Ecto.Changeset - - schema "posts" do - # Same as integer - field(:counter, :id) - field(:title, :string) - field(:text, :binary) - field(:temp, :string, default: "temp", virtual: true) - field(:public, :boolean, default: true) - field(:cost, :decimal) - field(:visits, :integer) - field(:intensity, :float) - field(:bid, :binary_id) - field(:uuid, Ecto.UUID, autogenerate: true) - field(:meta, :map) - 
field(:links, {:map, :string}) - field(:posted, :date) - - has_many( - :comments, - Ecto.Integration.Comment, - on_delete: :delete_all, - on_replace: :delete - ) - - has_one( - :permalink, - Ecto.Integration.Permalink, - on_delete: :delete_all, - on_replace: :delete - ) - - has_one( - :update_permalink, - Ecto.Integration.Permalink, - foreign_key: :post_id, - on_delete: :delete_all, - on_replace: :update - ) - - has_many(:comments_authors, through: [:comments, :author]) - belongs_to(:author, Ecto.Integration.User) - - many_to_many( - :users, - Ecto.Integration.User, - join_through: "posts_users", - on_delete: :delete_all, - on_replace: :delete - ) - - many_to_many( - :unique_users, - Ecto.Integration.User, - join_through: "posts_users", - unique: true - ) - - many_to_many( - :constraint_users, - Ecto.Integration.User, - join_through: Ecto.Integration.PostUserCompositePk - ) - - has_many(:users_comments, through: [:users, :comments]) - - has_many( - :comments_authors_permalinks, - through: [:comments_authors, :permalink] - ) - - timestamps() - has_one(:post_user_composite_pk, Ecto.Integration.PostUserCompositePk) - end - - def changeset(schema, params) do - cast(schema, params, ~w(counter title text temp public cost visits - intensity bid uuid meta posted)) - end -end - -defmodule Ecto.Integration.PostUsecTimestamps do - @moduledoc """ - This module is used to test: - - * Usec timestamps - - """ - use Ecto.Integration.Schema - - schema "posts" do - field(:title, :string) - timestamps(usec: true) - end -end - -defmodule Ecto.Integration.Comment do - @moduledoc """ - This module is used to test: - - * Optimistic lock - * Relationships - * Dependent callbacks - - """ - use Ecto.Integration.Schema - - schema "comments" do - field(:text, :string) - field(:lock_version, :integer, default: 1) - belongs_to(:post, Ecto.Integration.Post) - belongs_to(:author, Ecto.Integration.User) - has_one(:post_permalink, through: [:post, :permalink]) - end - - def changeset(schema, params) do - Ecto.Changeset.cast(schema, params, [:text]) - end -end - -defmodule Ecto.Integration.Permalink do - @moduledoc """ - This module is used to test: - - * Relationships - * Dependent callbacks - - """ - use Ecto.Integration.Schema - - schema "permalinks" do - field(:url, :string) - belongs_to(:post, Ecto.Integration.Post, on_replace: :nilify) - - belongs_to( - :update_post, - Ecto.Integration.Post, - on_replace: :update, - foreign_key: :post_id, - define_field: false - ) - - belongs_to(:user, Ecto.Integration.User) - has_many(:post_comments_authors, through: [:post, :comments_authors]) - end - - def changeset(schema, params) do - Ecto.Changeset.cast(schema, params, [:url]) - end -end - -defmodule Ecto.Integration.PostUser do - @moduledoc """ - This module is used to test: - - * Many to many associations join_through with schema - - """ - use Ecto.Integration.Schema - - schema "posts_users_pk" do - belongs_to(:user, Ecto.Integration.User) - belongs_to(:post, Ecto.Integration.Post) - timestamps() - end -end - -defmodule Ecto.Integration.User do - @moduledoc """ - This module is used to test: - - * UTC Timestamps - * Relationships - * Dependent callbacks - - """ - use Ecto.Integration.Schema - - schema "users" do - field(:name, :string) - - has_many( - :comments, - Ecto.Integration.Comment, - foreign_key: :author_id, - on_delete: :nilify_all, - on_replace: :nilify - ) - - has_one(:permalink, Ecto.Integration.Permalink, on_replace: :nilify) - - has_many( - :posts, - Ecto.Integration.Post, - foreign_key: :author_id, - on_delete: 
:nothing, - on_replace: :delete - ) - - belongs_to( - :custom, - Ecto.Integration.Custom, - references: :bid, - type: :binary_id - ) - - many_to_many( - :schema_posts, - Ecto.Integration.Post, - join_through: Ecto.Integration.PostUser - ) - - many_to_many( - :unique_posts, - Ecto.Integration.Post, - join_through: Ecto.Integration.PostUserCompositePk - ) - - timestamps(type: :utc_datetime) - end -end - -defmodule Ecto.Integration.Custom do - @moduledoc """ - This module is used to test: - - * binary_id primary key - * Tying another schemas to an existing schema - - Due to the second item, it must be a subset of posts. - """ - use Ecto.Integration.Schema - - @primary_key {:bid, :binary_id, autogenerate: true} - schema "customs" do - field(:uuid, Ecto.UUID) - - many_to_many( - :customs, - Ecto.Integration.Custom, - join_through: "customs_customs", - join_keys: [custom_id1: :bid, custom_id2: :bid], - on_delete: :delete_all, - on_replace: :delete - ) - end -end - -defmodule Ecto.Integration.Barebone do - @moduledoc """ - This module is used to test: - - * A schema without primary keys - - """ - use Ecto.Integration.Schema - - @primary_key false - schema "barebones" do - field(:num, :integer) - end -end - -defmodule Ecto.Integration.Tag do - @moduledoc """ - This module is used to test: - - * The array type - * Embedding many schemas (uses array) - - """ - use Ecto.Integration.Schema - - schema "tags" do - field(:ints, {:array, :integer}) - field(:uuids, {:array, Ecto.UUID}) - embeds_many(:items, Ecto.Integration.Item) - end -end - -defmodule Ecto.Integration.Item do - @moduledoc """ - This module is used to test: - - * Embedding - - """ - use Ecto.Schema - - embedded_schema do - field(:price, :integer) - field(:valid_at, :date) - end -end - -defmodule Ecto.Integration.Order do - @moduledoc """ - This module is used to test: - - * Embedding one schema - - """ - use Ecto.Integration.Schema - - schema "orders" do - embeds_one(:item, Ecto.Integration.Item) - end -end - -defmodule Ecto.Integration.CompositePk do - @moduledoc """ - This module is used to test: - - * Composite primary keys - - """ - use Ecto.Integration.Schema - - @primary_key false - schema "composite_pk" do - field(:a, :integer, primary_key: true) - field(:b, :integer, primary_key: true) - field(:name, :string) - end -end - -defmodule Ecto.Integration.CorruptedPk do - @moduledoc """ - This module is used to test: - - * Primary keys that is not unique on a DB side - - """ - use Ecto.Integration.Schema - - @primary_key false - schema "corrupted_pk" do - field(:a, :string, primary_key: true) - end -end - -defmodule Ecto.Integration.PostUserCompositePk do - @moduledoc """ - This module is used to test: - - * Composite primary keys for 2 belongs_to fields - - """ - use Ecto.Integration.Schema - - @primary_key false - schema "posts_users_composite_pk" do - belongs_to(:user, Ecto.Integration.User, primary_key: true) - belongs_to(:post, Ecto.Integration.Post, primary_key: true) - timestamps() - end -end diff --git a/integration/mssql/test_helper.exs b/integration/mssql/test_helper.exs index 7dd016c..f8aee77 100644 --- a/integration/mssql/test_helper.exs +++ b/integration/mssql/test_helper.exs @@ -1,80 +1,80 @@ Logger.configure(level: :info) -ExUnit.start( - exclude: [ - :array_type, - :map_type, - :uses_usec, - :uses_msec, - :modify_foreign_key_on_update, - :create_index_if_not_exists, - :not_supported_by_sql_server, - :upsert, - :upsert_all, - :identity_insert - ] -) - # Configure Ecto for support and tests +System.put_env("MSSQL_UID", 
"sa") +System.put_env("MSSQL_PWD", "ThePa$$word") Application.put_env(:ecto, :primary_key_type, :id) -Application.put_env(:ecto, :lock_for_update, "FOR UPDATE") -# Load support files -Code.require_file("./support/repo.exs", __DIR__) -Code.require_file("./support/schemas.exs", __DIR__) -Code.require_file("./support/migration.exs", __DIR__) - -pool = - case System.get_env("ECTO_POOL") || "poolboy" do - "poolboy" -> DBConnection.Poolboy - "sbroker" -> DBConnection.Sojourn - end +Application.put_env(:ecto, :async_integration_tests, false) +Application.put_env(:ecto_sql, :lock_for_update, " with (updlock) ") + +# support paths +ecto = Mix.Project.deps_paths()[:ecto] +ecto_support = ecto <> "/integration_test/support/" +ecto_sql = Mix.Project.deps_paths()[:ecto_sql] +ecto_sql_support = ecto_sql <> "/integration_test/support/" + +Code.require_file(ecto_sql_support <> "repo.exs", __DIR__) + +# Configure mssql connection +Application.put_env(:ecto_sql, :database, "mssql_ecto_integration_test") # Basic test repo alias Ecto.Integration.TestRepo Application.put_env( - :ecto, + :ecto_sql, TestRepo, adapter: MssqlEcto, username: System.get_env("MSSQL_UID"), password: System.get_env("MSSQL_PWD"), + hostname: System.get_env("MSSQL_HST") || "localhost", database: "mssql_ecto_integration_test", - pool: Ecto.Adapters.SQL.Sandbox, - ownership_pool: pool + pool: Ecto.Adapters.SQL.Sandbox ) defmodule Ecto.Integration.TestRepo do - use Ecto.Integration.Repo, otp_app: :ecto + use Ecto.Integration.Repo, + otp_app: :ecto_sql, + adapter: MssqlEcto + + def create_prefix(prefix) do + "create database #{prefix}" + end + + def drop_prefix(prefix) do + "drop database #{prefix}" + end + + def uuid do + Ecto.UUID + end end # Pool repo for transaction and lock tests alias Ecto.Integration.PoolRepo Application.put_env( - :ecto, + :ecto_sql, PoolRepo, adapter: MssqlEcto, - pool: pool, username: System.get_env("MSSQL_UID"), password: System.get_env("MSSQL_PWD"), database: "mssql_ecto_integration_test", - pool_size: 10, - max_restarts: 20, - max_seconds: 10 + pool_size: 10 + #max_restarts: 20, + #max_seconds: 10 ) defmodule Ecto.Integration.PoolRepo do - use Ecto.Integration.Repo, otp_app: :ecto - - def create_prefix(prefix) do - "create schema #{prefix}" - end - - def drop_prefix(prefix) do - "drop schema #{prefix}" - end + use Ecto.Integration.Repo, + otp_app: :ecto_sql, + adapter: MssqlEcto end +# Load support files +Code.require_file(ecto_support <> "schemas.exs", __DIR__) +Code.require_file(ecto_sql_support <> "migration.exs", __DIR__) + defmodule Ecto.Integration.Case do use ExUnit.CaseTemplate @@ -83,7 +83,7 @@ defmodule Ecto.Integration.Case do end end -{:ok, _} = MssqlEcto.ensure_all_started(TestRepo, :temporary) +{:ok, _} = MssqlEcto.ensure_all_started(TestRepo.config(), :temporary) # Load up the repository, start it, and run migrations _ = MssqlEcto.storage_down(TestRepo.config()) @@ -92,6 +92,24 @@ _ = MssqlEcto.storage_down(TestRepo.config()) {:ok, _pid} = TestRepo.start_link() {:ok, _pid} = PoolRepo.start_link() +# excludes +excludes = [ + :array_type, + :map_type, + :uses_usec, + :uses_msec, + :modify_foreign_key_on_update, + :create_index_if_not_exists, + :not_supported_by_sql_server, + :upsert, + :upsert_all, + :identity_insert +] + +ExUnit.configure(exclude: excludes) + :ok = Ecto.Migrator.up(TestRepo, 0, Ecto.Integration.Migration, log: false) Ecto.Adapters.SQL.Sandbox.mode(TestRepo, :manual) Process.flag(:trap_exit, true) + +ExUnit.start() diff --git a/integration/mssql/type_parser_test.exs 
b/integration/mssql/type_parser_test.exs new file mode 100644 index 0000000..ce13c98 --- /dev/null +++ b/integration/mssql/type_parser_test.exs @@ -0,0 +1,28 @@ +defmodule Ecto.Integration.TypeParserTest do + use Ecto.Integration.Case, async: Application.get_env(:ecto, :async_integration_tests, true) + + alias Ecto.Integration.TestRepo + import Ecto.Query + + alias Ecto.Integration.Post + alias Ecto.Integration.Comment + alias Ecto.Integration.Permalink + alias Ecto.Integration.User + alias Ecto.Integration.PostUserCompositePk + + test "joins with column alias" do + _p = TestRepo.insert!(%Post{title: "1"}) + p2 = TestRepo.insert!(%Post{title: "2"}) + c1 = TestRepo.insert!(%Permalink{url: "1", post_id: p2.id}) + + query = + from(p in Post, + join: c in assoc(p, :permalink), + on: c.id == ^c1.id, + select: %{post_id: p.id, link_id: c.id} + ) + + expected = %{link_id: c1.id, post_id: p2.id} + # pin expected, otherwise the match simply rebinds it and always passes + assert [^expected] = TestRepo.all(query) + end +end diff --git a/integration/support/file_helpers.exs b/integration/support/file_helpers.exs new file mode 100644 index 0000000..947ff49 --- /dev/null +++ b/integration/support/file_helpers.exs @@ -0,0 +1,43 @@ +defmodule Support.FileHelpers do + import ExUnit.Assertions + + @doc """ + Returns the `tmp_path` for tests. + """ + def tmp_path do + Path.expand("../../tmp", __DIR__) + end + + @doc """ + Executes the given function in a temp directory + tailored for this test case and test. + """ + defmacro in_tmp(fun) do + path = Path.join([tmp_path(), "#{__CALLER__.module}", "#{elem(__CALLER__.function, 0)}"]) + quote do + path = unquote(path) + File.rm_rf!(path) + File.mkdir_p!(path) + File.cd!(path, fn -> unquote(fun).(path) end) + end + end + + @doc """ + Asserts a file was generated. + """ + def assert_file(file) do + assert File.regular?(file), "Expected #{file} to exist, but does not" + end + + @doc """ + Asserts a file was generated and that it matches a given pattern. + """ + def assert_file(file, callback) when is_function(callback, 1) do + assert_file(file) + callback.(File.read!(file)) + end + + def assert_file(file, match) do + assert_file file, &(assert &1 =~ match) + end +end diff --git a/integration/support/migration.exs b/integration/support/migration.exs new file mode 100644 index 0000000..1efd8a3 --- /dev/null +++ b/integration/support/migration.exs @@ -0,0 +1,121 @@ +defmodule Ecto.Integration.Migration do + use Ecto.Migration + + def change do + create table(:users, comment: "users table") do + add :name, :string, comment: "name column" + add :custom_id, :uuid + timestamps() + end + + create table(:posts) do + add :title, :string, size: 100 + add :counter, :integer + add :text, :binary + add :bid, :binary_id + add :uuid, :uuid + add :meta, :map + add :links, {:map, :string} + add :intensities, {:map, :float} + add :public, :boolean + add :cost, :decimal, precision: 2, scale: 1 + add :visits, :integer + add :intensity, :float + add :author_id, :integer + add :posted, :date + timestamps(null: true) + end + + create table(:posts_users, primary_key: false) do + add :post_id, references(:posts) + add :user_id, references(:users) + end + + create table(:posts_users_pk) do + add :post_id, references(:posts) + add :user_id, references(:users) + timestamps() + end + + # Add a unique index on uuid. We use this + # to verify the behaviour that the index + # only matters if the UUID column is not NULL. 
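# (Unlike postgres, SQL Server treats NULLs as equal in a unique index, so
# an unfiltered unique index on a nullable uuid column admits only a single
# NULL row. The deleted MSSQL-specific migration earlier in this diff worked
# around that with a filtered index:
#
#     create(unique_index(:customs, [:uuid], where: "uuid IS NOT NULL"))
#
# The :customs index created below drops that filter, so inserting a second
# NULL uuid would violate it on SQL Server.)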
+ create unique_index(:posts, [:uuid], comment: "posts index") + + create table(:permalinks) do + add :uniform_resource_locator, :string + add :post_id, references(:posts) + add :user_id, references(:users) + end + + create unique_index(:permalinks, [:uniform_resource_locator]) + + create table(:comments) do + add :text, :string, size: 100 + add :lock_version, :integer, default: 1 + add :post_id, references(:posts) + add :author_id, references(:users) + end + + create table(:customs, primary_key: false) do + add :bid, :binary_id, primary_key: true + add :uuid, :uuid + end + + create unique_index(:customs, [:uuid]) + + create table(:customs_customs, primary_key: false) do + add :custom_id1, references(:customs, column: :bid, type: :binary_id) + add :custom_id2, references(:customs, column: :bid, type: :binary_id) + end + + create table(:barebones) do + add :num, :integer + end + + create table(:transactions) do + add :num, :integer + end + + create table(:lock_counters) do + add :count, :integer + end + + create table(:orders) do + add :instructions, :text + add :item, :map + add :permalink_id, references(:permalinks) + end + + unless :array_type in ExUnit.configuration[:exclude] do + create table(:tags) do + add :ints, {:array, :integer} + add :uuids, {:array, :uuid}, default: [] + add :items, {:array, :map} + end + end + + create table(:composite_pk, primary_key: false) do + add :a, :integer, primary_key: true + add :b, :integer, primary_key: true + add :name, :string + end + + create table(:corrupted_pk, primary_key: false) do + add :a, :string + end + + create table(:posts_users_composite_pk) do + add :post_id, references(:posts), primary_key: true + add :user_id, references(:users), primary_key: true + timestamps() + end + + create unique_index(:posts_users_composite_pk, [:post_id, :user_id]) + + create table(:usecs) do + add :naive_datetime_usec, :naive_datetime_usec + add :utc_datetime_usec, :utc_datetime_usec + end + end +end diff --git a/integration/support/repo.exs b/integration/support/repo.exs new file mode 100644 index 0000000..f17c838 --- /dev/null +++ b/integration/support/repo.exs @@ -0,0 +1,23 @@ +defmodule Ecto.Integration.Repo do + defmacro __using__(opts) do + quote do + use Ecto.Repo, unquote(opts) + + @query_event __MODULE__ + |> Module.split() + |> Enum.map(& &1 |> Macro.underscore() |> String.to_atom()) + |> Kernel.++([:query]) + + def init(_, opts) do + fun = &Ecto.Integration.Repo.handle_event/4 + :telemetry.attach_many(__MODULE__, [[:custom], @query_event], fun, :ok) + {:ok, opts} + end + end + end + + def handle_event(event, latency, metadata, _config) do + handler = Process.delete(:telemetry) || fn _, _, _ -> :ok end + handler.(event, latency, metadata) + end +end diff --git a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/49_migration_49.exs b/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/49_migration_49.exs deleted file mode 100644 index 2f851ac..0000000 --- a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/49_migration_49.exs +++ /dev/null @@ -1,15 +0,0 @@ -defmodule Elixir.Ecto.Integration.MigratorTest.Migration49 do - use Ecto.Migration - - def up do - update(&[49 | &1]) - end - - def down do - update(&List.delete(&1, 49)) - end - - defp update(fun) do - Process.put(:migrations, fun.(Process.get(:migrations) || [])) - end -end diff --git a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/50_migration_50.exs 
b/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/50_migration_50.exs deleted file mode 100644 index e484bbb..0000000 --- a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/50_migration_50.exs +++ /dev/null @@ -1,15 +0,0 @@ -defmodule Elixir.Ecto.Integration.MigratorTest.Migration50 do - use Ecto.Migration - - def up do - update(&[50 | &1]) - end - - def down do - update(&List.delete(&1, 50)) - end - - defp update(fun) do - Process.put(:migrations, fun.(Process.get(:migrations) || [])) - end -end diff --git a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run up to/step migration/47_migration_47.exs b/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run up to/step migration/47_migration_47.exs deleted file mode 100644 index c77ea63..0000000 --- a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run up to/step migration/47_migration_47.exs +++ /dev/null @@ -1,15 +0,0 @@ -defmodule Elixir.Ecto.Integration.MigratorTest.Migration47 do - use Ecto.Migration - - def up do - update(&[47 | &1]) - end - - def down do - update(&List.delete(&1, 47)) - end - - defp update(fun) do - Process.put(:migrations, fun.(Process.get(:migrations) || [])) - end -end diff --git a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run up to/step migration/48_migration_48.exs b/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run up to/step migration/48_migration_48.exs deleted file mode 100644 index 2b5a437..0000000 --- a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test run up to/step migration/48_migration_48.exs +++ /dev/null @@ -1,15 +0,0 @@ -defmodule Elixir.Ecto.Integration.MigratorTest.Migration48 do - use Ecto.Migration - - def up do - update(&[48 | &1]) - end - - def down do - update(&List.delete(&1, 48)) - end - - defp update(fun) do - Process.put(:migrations, fun.(Process.get(:migrations) || [])) - end -end diff --git a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test runs all migrations/53_migration_53.exs b/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test runs all migrations/53_migration_53.exs deleted file mode 100644 index edc2a7c..0000000 --- a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test runs all migrations/53_migration_53.exs +++ /dev/null @@ -1,15 +0,0 @@ -defmodule Elixir.Ecto.Integration.MigratorTest.Migration53 do - use Ecto.Migration - - def up do - update(&[53 | &1]) - end - - def down do - update(&List.delete(&1, 53)) - end - - defp update(fun) do - Process.put(:migrations, fun.(Process.get(:migrations) || [])) - end -end diff --git a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test runs all migrations/54_migration_54.exs b/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test runs all migrations/54_migration_54.exs deleted file mode 100644 index 1339d1b..0000000 --- a/integration/tmp/Elixir.Ecto.Integration.MigratorTest/test runs all migrations/54_migration_54.exs +++ /dev/null @@ -1,15 +0,0 @@ -defmodule Elixir.Ecto.Integration.MigratorTest.Migration54 do - use Ecto.Migration - - def up do - update(&[54 | &1]) - end - - def down do - update(&List.delete(&1, 54)) - end - - defp update(fun) do - Process.put(:migrations, fun.(Process.get(:migrations) || [])) - end -end diff --git a/lib/_mssql_ecto.ex b/lib/_mssql_ecto.ex new file mode 100644 index 0000000..0de0189 --- /dev/null +++ b/lib/_mssql_ecto.ex @@ -0,0 +1,225 @@ +defmodule MssqlEcto do + @moduledoc false + # Inherit all behaviour from Ecto.Adapters.SQL + use 
Ecto.Adapters.SQL, + driver: :mssqlex, + migration_lock: " with (updlock) " + + # And provide a custom storage implementation + @behaviour Ecto.Adapter.Storage + @behaviour Ecto.Adapter.Structure + + @doc """ + All Ecto extensions for Mssqlex. + """ + def extensions do + [] + end + + # Support arrays in place of IN + @impl true + def dumpers({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)] + def dumpers({:map, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)] + def dumpers({:in, sub}, {:in, sub}), do: [{:array, sub}] + def dumpers(:binary_id, type), do: [type, Ecto.UUID] + def dumpers(_, type), do: [type] + + ## Storage API + + @impl true + def storage_up(opts) do + database = + Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration" + + opts = Keyword.put(opts, :database, nil) + + command = + ~s(CREATE DATABASE #{database}) + |> concat_if(opts[:collation], &"COLLATE '#{&1}'") + |> concat_if(opts[:template], &"TEMPLATE=#{&1}") + |> concat_if(opts[:lc_ctype], &"LC_CTYPE='#{&1}'") + |> concat_if(opts[:lc_collate], &"LC_COLLATE='#{&1}'") + + case run_query(command, opts) do + {:ok, _} -> + :ok + + {:error, %{odbc_code: :database_already_exists}} -> + {:error, :already_up} + + {:error, error} -> + {:error, Exception.message(error)} + end + end + + defp concat_if(content, nil, _fun), do: content + defp concat_if(content, value, fun), do: content <> " " <> fun.(value) + + @impl true + def storage_down(opts) do + database = + Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration" + + command = "DROP DATABASE #{database}" + opts = Keyword.put(opts, :database, nil) + + case run_query(command, opts) do + {:ok, _} -> + :ok + + {:error, %{odbc_code: :base_table_or_view_not_found}} -> + {:error, :already_down} + + {:error, error} -> + {:error, Exception.message(error)} + end + end + + @impl true + def supports_ddl_transaction? 
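`storage_up/1` and `storage_down/1` above are the hooks behind `mix ecto.create` and `mix ecto.drop`. A hypothetical direct call, using the option keys this code reads (note that `:collation`, `:template`, and the `:lc_*` options look carried over from the Postgres adapter and are unlikely to be valid T-SQL):

```elixir
# Per the clauses above, returns :ok, {:error, :already_up},
# or {:error, message} where message is a string.
MssqlEcto.storage_up(
  database: "my_app_test",
  hostname: "localhost",
  username: "sa",
  password: System.get_env("SA_PASSWORD")
)
```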
do + true + end + + @impl true + def structure_dump(default, config) do + table = config[:migration_source] || "schema_migrations" + + with {:ok, versions} <- select_versions(table, config), + {:ok, path} <- pg_dump(default, config), + do: append_versions(table, versions, path) + end + + defp select_versions(table, config) do + case run_query(~s[SELECT version FROM public."#{table}" ORDER BY version], config) do + {:ok, %{rows: rows}} -> {:ok, Enum.map(rows, &hd/1)} + {:error, %{mssql: %{code: :undefined_table}}} -> {:ok, []} + {:error, _} = error -> error + end + end + + # TODO this is for postgres, not mssql + defp pg_dump(default, config) do + path = config[:dump_path] || Path.join(default, "structure.sql") + File.mkdir_p!(Path.dirname(path)) + + case run_with_cmd("pg_dump", config, [ + "--file", + path, + "--schema-only", + "--no-acl", + "--no-owner", + config[:database] + ]) do + {_output, 0} -> + {:ok, path} + + {output, _} -> + {:error, output} + end + end + + defp append_versions(_table, [], path) do + {:ok, path} + end + + defp append_versions(table, versions, path) do + sql = + ~s[INSERT INTO public."#{table}" (version) VALUES ] <> + Enum.map_join(versions, ", ", &"(#{&1})") <> ~s[;\n\n] + + File.open!(path, [:append], fn file -> + IO.write(file, sql) + end) + + {:ok, path} + end + + @impl true + def structure_load(default, config) do + path = config[:dump_path] || Path.join(default, "structure.sql") + + args = [ + "--quiet", + "--file", + path, + "-vON_ERROR_STOP=1", + "--single-transaction", + config[:database] + ] + + case run_with_cmd("psql", config, args) do + {_output, 0} -> {:ok, path} + {output, _} -> {:error, output} + end + end + + ## Helpers + + defp run_query(sql, opts) do + {:ok, _} = Application.ensure_all_started(:mssqlex) + + opts = + opts + |> Keyword.drop([:name, :log, :pool, :pool_size]) + |> Keyword.put(:backoff_type, :stop) + |> Keyword.put(:max_restarts, 0) + + {:ok, pid} = Task.Supervisor.start_link() + + task = + Task.Supervisor.async_nolink(pid, fn -> + {:ok, conn} = Mssqlex.start_link(opts) + + value = Mssqlex.query(conn, sql, [], opts) + GenServer.stop(conn) + value + end) + + timeout = Keyword.get(opts, :timeout, 15_000) + + task_return = Task.yield(task, timeout) || Task.shutdown(task) + + case task_return do + {:ok, {:ok, result}} -> + {:ok, result} + + {:ok, {:error, error}} -> + {:error, error} + + {:exit, {%{__struct__: struct} = error, _}} + when struct in [Mssqlex.Error, DBConnection.Error] -> + {:error, error} + + {:exit, reason} -> + {:error, RuntimeError.exception(Exception.format_exit(reason))} + + nil -> + {:error, RuntimeError.exception("command timed out")} + end + end + + defp run_with_cmd(cmd, opts, opt_args) do + unless System.find_executable(cmd) do + raise "could not find executable `#{cmd}` in path, " <> + "please guarantee it is available before running ecto commands" + end + + env = [{"PGCONNECT_TIMEOUT", "10"}] + + env = + if password = opts[:password] do + [{"PGPASSWORD", password} | env] + else + env + end + + args = [] + args = if username = opts[:username], do: ["-U", username | args], else: args + args = if port = opts[:port], do: ["-p", to_string(port) | args], else: args + + host = opts[:hostname] || System.get_env("PGHOST") || "localhost" + args = ["--host", host | args] + args = args ++ opt_args + System.cmd(cmd, args, env: env, stderr_to_stdout: true) + end +end diff --git a/lib/connection/_connection.ex b/lib/connection/_connection.ex new file mode 100644 index 0000000..248ea3f --- /dev/null +++ 
b/lib/connection/_connection.ex
@@ -0,0 +1,157 @@
+if Code.ensure_loaded?(Mssqlex) do
+  defmodule MssqlEcto.Connection do
+    @moduledoc false
+
+    @default_port 1433
+    @behaviour Ecto.Adapters.SQL.Connection
+
+    alias MssqlEcto.Connection.{DDL, Query}
+
+    import MssqlEcto.Connection.Helper
+    require Logger
+
+    ## Module and Options
+
+    @impl true
+    @spec child_spec(Keyword.t()) :: Supervisor.Spec.spec()
+    def child_spec(opts) do
+      opts
+      |> Keyword.put_new(:port, @default_port)
+      |> Mssqlex.child_spec()
+    end
+
+    @impl true
+    def to_constraints(%Mssqlex.Error{} = error), do: error.constraint_violations
+
+    ## Query
+    @impl true
+    def prepare_execute(conn, name, sql, params, opts) do
+      Mssqlex.prepare_execute(conn, name, sql, params, opts)
+    end
+
+    @impl true
+    def query(conn, sql, params, opts) do
+      Mssqlex.query(conn, sql, params, opts)
+    end
+
+    @impl true
+    def execute(conn, %{ref: ref} = query, params, opts) do
+      case DBConnection.execute(conn, query, params, opts) do
+        {:ok, %{ref: ^ref}, result} ->
+          {:ok, result}
+
+        {:ok, _, _} = ok ->
+          ok
+
+        # The :feature_not_supported clause must come before the general
+        # Mssqlex.Error clause, otherwise it can never match.
+        {:error, %Mssqlex.Error{odbc_code: :feature_not_supported} = err} ->
+          {:reset, err}
+
+        {:error, %Mssqlex.Error{} = err} ->
+          {:reset, err}
+
+        {:error, _} = error ->
+          error
+      end
+    end
+
+    @impl true
+    def stream(conn, sql, params, opts) do
+      Mssqlex.stream(conn, sql, params, opts)
+    end
+
+    # query
+    @impl true
+    def all(query), do: Query.all(query)
+
+    @impl true
+    def update_all(query, prefix \\ nil), do: Query.update_all(query, prefix)
+
+    @impl true
+    def delete_all(query), do: Query.delete_all(query)
+
+    @impl true
+    def insert(prefix, table, header, rows, on_conflict, returning) do
+      values =
+        if header == [] do
+          [
+            Query.output(returning, "INSERTED"),
+            " DEFAULT VALUES "
+            | intersperse_map(rows, ?,, fn _ -> "" end)
+          ]
+        else
+          [
+            ?\s,
+            ?(,
+            intersperse_map(header, ?,, &quote_name/1),
+            ")",
+            Query.output(returning, "INSERTED"),
+            " VALUES " | Query.insert_all(rows, 1)
+          ]
+        end
+
+      [
+        "INSERT INTO ",
+        quote_table(prefix, table),
+        Query.insert_as(on_conflict),
+        values,
+        Query.on_conflict(on_conflict, header)
+      ]
+    end
+
+    @impl true
+    def update(prefix, table, fields, filters, returning) do
+      {fields, count} =
+        intersperse_reduce(fields, ", ", 1, fn field, acc ->
+          {[quote_name(field), " = ?"], acc + 1}
+        end)
+
+      {filters, _count} = intersperse_reduce(filters, " AND ", count, &condition_reducer/2)
+
+      [
+        "UPDATE ",
+        quote_table(prefix, table),
+        " SET ",
+        fields,
+        Query.output(returning, "INSERTED"),
+        " WHERE ",
+        filters
+      ]
+    end
+
+    @impl true
+    def delete(prefix, table, filters, returning) do
+      {filters, _} = intersperse_reduce(filters, " AND ", 1, &condition_reducer/2)
+
+      [
+        "DELETE FROM ",
+        quote_table(prefix, table),
+        Query.output(returning, "DELETED"),
+        " WHERE ",
+        filters
+      ]
+    end
+
+    defp condition_reducer({field, nil}, acc) do
+      {[quote_name(field), " IS NULL"], acc}
+    end
+
+    defp condition_reducer({field, _value}, acc) do
+      {[quote_name(field), " = ?"], acc + 1}
+    end
+
+    defp condition_reducer(field, acc) do
+      {[quote_name(field), " = ?"], acc + 1}
+    end
+
+    # DDL
+    @impl true
+    def execute_ddl(args), do: DDL.execute(args)
+
+    @impl true
+    def ddl_logs(result), do: DDL.logs(result)
+
+    @impl true
+    def table_exists_query(table), do: DDL.table_exists_query(table)
+  end
+end
diff --git a/lib/mssql_ecto/migration.ex b/lib/connection/ddl.ex
similarity index 84%
rename from lib/mssql_ecto/migration.ex
rename to lib/connection/ddl.ex
index b4c0b5f..18f824c 100644
--- a/lib/mssql_ecto/migration.ex
+++ b/lib/connection/ddl.ex
@@ -1,16 +1,46 @@
-defmodule MssqlEcto.Migration do
+defmodule MssqlEcto.Connection.DDL do
+  @moduledoc false
   alias Ecto.Migration.{Table, Index, Reference, Constraint}
+  import MssqlEcto.Connection.Helper
 
-  import MssqlEcto.Helpers
-
+  @creates [:create, :create_if_not_exists]
   @drops [:drop, :drop_if_exists]
 
-  @doc """
-  Receives a DDL command and returns a query that executes it.
-  """
-  @spec execute_ddl(command :: Ecto.Adapter.Migration.command()) :: String.t()
-  def execute_ddl({command, %Table{} = table, columns})
-      when command in [:create, :create_if_not_exists] do
+  def logs(%Mssqlex.Result{} = result) do
+    messages =
+      case result do
+        %{messages: messages} ->
+          messages
+
+        _ ->
+          []
+      end
+
+    for message <- messages do
+      %{message: message, severity: severity} = message
+
+      {ddl_log_level(severity), message, []}
+    end
+  end
+
+  # TODO these are for Postgres, maybe irrelevant for MSSQL
+  defp ddl_log_level("DEBUG"), do: :debug
+  defp ddl_log_level("LOG"), do: :info
+  defp ddl_log_level("INFO"), do: :info
+  defp ddl_log_level("NOTICE"), do: :info
+  defp ddl_log_level("WARNING"), do: :warn
+  defp ddl_log_level("ERROR"), do: :error
+  defp ddl_log_level("FATAL"), do: :error
+  defp ddl_log_level("PANIC"), do: :error
+  defp ddl_log_level(_severity), do: :info
+
+  def table_exists_query(table) do
+    {"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = ?", [table]}
+  end
+
+  # execute ddl (all MSSQL from here on)
+  def execute({command, %Table{} = table, columns})
+      when command in @creates do
     query = [
       if_do(
         command == :create_if_not_exists,
@@ -29,7 +59,7 @@
     [query]
   end
 
-  def execute_ddl({command, %Table{} = table}) when command in @drops do
+  def execute({command, %Table{} = table}) when command in @drops do
     [
       [
         if_do(
@@ -42,7 +72,7 @@
     ]
   end
 
-  def execute_ddl({:alter, %Table{} = table, changes}) do
+  def execute({:alter, %Table{} = table, changes}) do
     query = [
       column_changes(table, changes),
       quote_alter(pk_definition(changes, " ADD ", table), table)
@@ -51,7 +81,7 @@
     [query]
   end
 
-  def execute_ddl({:create, %Index{} = index}) do
+  def execute({:create, %Index{} = index}) do
     fields = intersperse_map(index.columns, ", ", &index_expr/1)
 
     queries = [
@@ -73,11 +103,11 @@
     queries
   end
 
-  def execute_ddl({:create_if_not_exists, %Index{} = _index}) do
+  def execute({:create_if_not_exists, %Index{} = _index}) do
     raise("create index if not exists: not supported")
   end
 
-  def execute_ddl({command, %Index{} = index}) when command in @drops do
+  def execute({command, %Index{} = index}) when command in @drops do
     if_exists = if command == :drop_if_exists, do: "IF EXISTS ", else: []
 
     [
@@ -91,7 +121,7 @@
     ]
   end
 
-  def execute_ddl({:rename, %Table{} = current_table, %Table{} = new_table}) do
+  def execute({:rename, %Table{} = current_table, %Table{} = new_table}) do
     [
       [
         "EXEC sp_rename ",
@@ -103,7 +133,7 @@
     ]
   end
 
-  def execute_ddl({:rename, %Table{} = table, current_column, new_column}) do
+  def execute({:rename, %Table{} = table, current_column, new_column}) do
     [
       [
         "EXEC sp_rename ",
@@ -115,7 +145,7 @@
     ]
   end
 
-  def execute_ddl({:create, %Constraint{} = constraint}) do
+  def execute({:create, %Constraint{} = constraint}) do
     queries = [
       [
         "ALTER TABLE ",
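`table_exists_query/1` above returns a plain parameterized probe, so it is easy to exercise on its own. A small sketch (the repo name is assumed):

```elixir
{sql, params} = MssqlEcto.Connection.DDL.table_exists_query("schema_migrations")
# sql    == "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = ?"
# params == ["schema_migrations"]
Ecto.Adapters.SQL.query!(MyApp.Repo, sql, params)
```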
@@ -128,7 +158,7 @@
     queries
   end
 
-  def execute_ddl({:drop, %Constraint{} = constraint}) do
+  def execute({:drop, %Constraint{} = constraint}) do
     [
       [
         "ALTER TABLE ",
@@ -139,9 +169,9 @@
     ]
   end
 
-  def execute_ddl(string) when is_binary(string), do: [string]
+  def execute(string) when is_binary(string), do: [string]
 
-  def execute_ddl(keyword) when is_list(keyword),
+  def execute(keyword) when is_list(keyword),
     do: error!(nil, "MSSQL adapter does not support keyword lists in execute")
 
   @doc false
@@ -199,8 +229,7 @@
   defp column_changes(table, columns) do
-    {additions, changes} =
-      Enum.split_with(columns, fn val -> elem(val, 0) == :add end)
+    {additions, changes} = Enum.split_with(columns, fn val -> elem(val, 0) == :add end)
 
     [
       if_do(additions !== [], column_additions(additions, table)),
diff --git a/lib/mssql_ecto/helpers.ex b/lib/connection/helper.ex
similarity index 64%
rename from lib/mssql_ecto/helpers.ex
rename to lib/connection/helper.ex
index 4900096..f3bb5a0 100644
--- a/lib/mssql_ecto/helpers.ex
+++ b/lib/connection/helper.ex
@@ -1,9 +1,20 @@
-defmodule MssqlEcto.Helpers do
-  alias MssqlEcto.QueryString
+defmodule MssqlEcto.Connection.Helper do
+  @moduledoc false
+
+  alias MssqlEcto.Connection.Query.Expression
+  require Logger
 
   def get_source(query, sources, ix, source) do
     {expr, name, _schema} = elem(sources, ix)
-    {expr || QueryString.paren_expr(source, sources, query), name}
+    {expr || Expression.expr(source, sources, query), name}
+  end
+
+  def add_prefix(nil, name) do
+    name
+  end
+
+  def add_prefix(prefix, name) do
+    [quote_name(prefix), ".", name]
   end
 
   def quote_qualified_name(name, sources, ix) do
@@ -43,23 +54,32 @@
     [wrapper, value, wrapper]
   end
 
-  def quote_table(prefix, name)
-  def quote_table(nil, name), do: quote_name(name)
+  def quote_table(nil, name), do: quote_table(name)
+  def quote_table(prefix, name), do: [quote_table(prefix), ?., quote_table(name)]
 
-  def quote_table(prefix, name),
-    do: intersperse_map([prefix, name], ?., &quote_name/1)
+  def quote_table(name) when is_atom(name),
+    do: quote_table(Atom.to_string(name))
 
-  def single_quote(value), do: value |> escape_string |> wrap_in(?')
+  def quote_table(name) do
+    if String.contains?(name, "\"") do
+      error!(nil, "bad table name #{inspect(name)}")
+    end
+
+    [?", name, ?"]
+  end
+
+  def single_quote(value), do: [?', escape_string(value), ?']
 
   def intersperse_map(list, separator, mapper, acc \\ [])
-  def intersperse_map([], _separator, _mapper, acc), do: acc
+
+  def intersperse_map([], _separator, _mapper, acc),
+    do: acc
 
   def intersperse_map([elem], _separator, mapper, acc),
     do: [acc | mapper.(elem)]
 
   def intersperse_map([elem | rest], separator, mapper, acc),
-    do:
-      intersperse_map(rest, separator, mapper, [acc, mapper.(elem), separator])
+    do: intersperse_map(rest, separator, mapper, [acc, mapper.(elem), separator])
 
   def intersperse_reduce(list, separator, user_acc, reducer, acc \\ [])
 
@@ -73,12 +93,7 @@
   def intersperse_reduce([elem | rest], separator, user_acc, reducer, acc) do
     {elem, user_acc} = reducer.(elem, user_acc)
-
-    intersperse_reduce(rest, separator, user_acc, reducer, [
-      acc,
-      elem,
-      separator
-    ])
+    intersperse_reduce(rest, separator, user_acc, reducer, [acc, elem, separator])
   end
 
   def if_do(condition, value) do
@@ -90,7 +105,7 @@
   end
 
   def ecto_to_db({:array, t}), do: [ecto_to_db(t), ?[, ?]]
-  def ecto_to_db(:id), do: "int"
+  def ecto_to_db(:id), do: "int identity(1,1)"
   def ecto_to_db(:serial), do: "int 
identity(1,1)" def ecto_to_db(:bigserial), do: "bigint identity(1,1)" def ecto_to_db(:binary_id), do: "char(36)" @@ -102,9 +117,22 @@ defmodule MssqlEcto.Helpers do def ecto_to_db(:map), do: "nvarchar(4000)" def ecto_to_db({:map, _}), do: "nvarchar(4000)" def ecto_to_db(:utc_datetime), do: "datetime2" + def ecto_to_db(:utc_datetime_usec), do: "datetime2" def ecto_to_db(:naive_datetime), do: "datetime2" + def ecto_to_db(:naive_datetime_usec), do: "datetime2" def ecto_to_db(:timestamp), do: "datetime2" - def ecto_to_db(other), do: Atom.to_string(other) + + def ecto_to_db(:bigint), do: "bigint" + def ecto_to_db(:decimal), do: "decimal" + def ecto_to_db(:float), do: "float" + def ecto_to_db(:date), do: "date" + def ecto_to_db(:text), do: "text" + def ecto_to_db(:numeric), do: "numeric" + + def ecto_to_db(other) do + Logger.warn("type not explicitly handled: #{other}") + Atom.to_string(other) + end def error!(nil, message) do raise ArgumentError, message diff --git a/lib/connection/query/_query.ex b/lib/connection/query/_query.ex new file mode 100644 index 0000000..c53db99 --- /dev/null +++ b/lib/connection/query/_query.ex @@ -0,0 +1,467 @@ +defmodule MssqlEcto.Connection.Query do + @moduledoc false + + alias Ecto.Query + alias Query.{BooleanExpr, JoinExpr, QueryExpr} + alias MssqlEcto.Connection.Query.Expression + import MssqlEcto.Connection.Helper + + def all(query) do + sources = create_names(query) + {select_distinct, order_by_distinct} = distinct(query.distinct, sources, query) + + select = select(query, select_distinct, sources) + from = from(query, sources) + join = join(query, sources) + where = where(query, sources) + group_by = group_by(query, sources) + having = having(query, sources) + window = window(query, sources) + combinations = combinations(query) + order_by = order_by(query, order_by_distinct, sources) + offset = offset(query, sources) + lock = lock(query.lock) + + [ + select, + from, + join, + where, + group_by, + having, + window, + combinations, + order_by, + offset | lock + ] + end + + def update_all(%{from: %{source: source}} = query, prefix) do + sources = create_names(query) + {from, name} = get_source(query, sources, 0, source) + from = prefix_name(from, query) + + prefix = prefix || ["UPDATE ", name | " SET "] + table_alias = [" FROM ", from, " AS ", name] + + fields = update_fields(query, sources) + join = join(query, sources) + where = where(query, sources) + + [ + prefix, + fields, + output(query, sources, "INSERTED"), + table_alias, + join, + where + ] + end + + def delete_all(%{from: from} = query) do + sources = create_names(query) + {from, name} = get_source(query, sources, 0, from) + + join = join(query, sources) + where = where(query, sources) + + [ + "DELETE ", + name, + output(query, sources, "DELETED"), + " FROM ", + from, + " AS ", + name, + join, + where + ] + end + + def insert_as({%{sources: sources}, _, _}) do + {_expr, name, _schema} = create_name(sources, 0) + [" AS " | name] + end + + def insert_as({_, _, _}) do + [] + end + + def on_conflict({:raise, _, []}, _header), + do: [] + + def on_conflict({:nothing, _, targets}, _header), + do: [" ON CONFLICT ", conflict_target(targets) | "DO NOTHING"] + + def on_conflict({fields, _, targets}, _header) when is_list(fields), + do: [" ON CONFLICT ", conflict_target(targets), "DO " | replace(fields)] + + def on_conflict({query, _, targets}, _header), + do: [" ON CONFLICT ", conflict_target(targets), "DO " | update_all(query, "UPDATE SET ")] + + defp conflict_target({:constraint, constraint}), + do: ["ON 
CONSTRAINT ", quote_name(constraint), ?\s] + + defp conflict_target({:unsafe_fragment, fragment}), + do: [fragment, ?\s] + + defp conflict_target([]), + do: [] + + defp conflict_target(targets), + do: [?(, intersperse_map(targets, ?,, "e_name/1), ?), ?\s] + + defp replace(fields) do + [ + "UPDATE SET " + | intersperse_map(fields, ?,, fn field -> + quoted = quote_name(field) + [quoted, " = ", "EXCLUDED." | quoted] + end) + ] + end + + def insert_all(rows, counter) do + intersperse_reduce(rows, ?,, counter, fn row, counter -> + {row, counter} = insert_each(row, counter) + {[?(, row, ?)], counter} + end) + |> elem(0) + end + + defp insert_each(values, counter) do + intersperse_reduce(values, ?,, counter, fn + nil, counter -> + {"DEFAULT", counter} + + {%Query{} = query, params_counter}, counter -> + {[?(, all(query), ?)], counter + params_counter} + + _, counter -> + {[??], counter + 1} + end) + end + + ## Query generation + + defp select(%{select: %{fields: fields}} = query, select_distinct, sources) do + top = top(query, sources) + ["SELECT", top, select_distinct, ?\s | select_fields(fields, sources, query)] + end + + defp select_fields([], _sources, _query), + do: "'TRUE'" + + defp select_fields(fields, sources, query) do + intersperse_map(fields, ", ", fn + {key, value} -> + [Expression.expr(value, sources, query), " AS " | quote_name(key)] + + value -> + Expression.expr(value, sources, query) + end) + end + + defp distinct(nil, _, _), do: {[], []} + defp distinct(%QueryExpr{expr: []}, _, _), do: {[], []} + defp distinct(%QueryExpr{expr: true}, _, _), do: {" DISTINCT", []} + defp distinct(%QueryExpr{expr: false}, _, _), do: {[], []} + + defp distinct(%QueryExpr{expr: exprs}, sources, query) do + {[ + " DISTINCT ON (", + intersperse_map(exprs, ", ", fn {_, expr} -> Expression.expr(expr, sources, query) end), + ?) + ], exprs} + end + + defp from(%{from: %{hints: [_ | _]}} = query, _sources) do + error!(query, "table hints are not implemented") + end + + defp from(%{from: %{source: source}, prefix: prefix} = query, sources) do + {from, name} = get_source(query, sources, 0, source) + [" FROM ", add_prefix(prefix, from), " AS " | name] + end + + defp update_fields(%{updates: updates} = query, sources) do + for( + %{expr: expr} <- updates, + {op, kw} <- expr, + {key, value} <- kw, + do: update_op(op, key, value, sources, query) + ) + |> Enum.intersperse(", ") + end + + defp update_op(:set, key, value, sources, query) do + [quote_name(key), " = " | Expression.expr(value, sources, query)] + end + + defp update_op(:inc, key, value, sources, query) do + [ + quote_name(key), + " = ", + quote_qualified_name(key, sources, 0), + " + " + | Expression.expr(value, sources, query) + ] + end + + defp update_op(:push, key, value, sources, query) do + [ + quote_name(key), + " = array_append(", + quote_qualified_name(key, sources, 0), + ", ", + Expression.expr(value, sources, query), + ?) + ] + end + + defp update_op(:pull, key, value, sources, query) do + [ + quote_name(key), + " = array_remove(", + quote_qualified_name(key, sources, 0), + ", ", + Expression.expr(value, sources, query), + ?) 
+ ] + end + + defp update_op(command, _key, _value, _sources, query) do + error!(query, "#{inspect(command)} not implemented for MSSQL") + end + + defp join(%{joins: []}, _sources), do: [] + + defp join(%{joins: joins} = query, sources) do + [ + ?\s + | intersperse_map(joins, ?\s, fn + %JoinExpr{ + on: %QueryExpr{expr: expr}, + qual: qual, + ix: ix, + source: source, + hints: hints + } -> + if hints != [] do + error!(query, "table hints are not implemented") + end + + {join, name} = get_source(query, sources, ix, source) + + prefix = query.prefix + + if is_nil(prefix) do + [join_qual(qual), join, " AS ", name | join_on(qual, expr, sources, query)] + else + prefix = quote_name(query.prefix) + + [ + join_qual(qual), + prefix, + ".", + join, + " AS ", + name | join_on(qual, expr, sources, query) + ] + end + end) + ] + end + + defp prefix_name(name, %{prefix: nil}), do: [name] + + defp prefix_name(name, %{prefix: prefix}) do + prefix = quote_name(prefix) + [prefix, ".", name] + end + + defp join_on(_qual, expr, sources, query), + do: [" ON ", Expression.paren_expr(expr, sources, query)] + + defp join_qual(:inner), do: "INNER JOIN " + defp join_qual(:inner_lateral), do: "INNER JOIN LATERAL " + defp join_qual(:left), do: "LEFT OUTER JOIN " + defp join_qual(:left_lateral), do: "LEFT OUTER JOIN LATERAL " + defp join_qual(:right), do: "RIGHT OUTER JOIN " + defp join_qual(:full), do: "FULL OUTER JOIN " + defp join_qual(:cross), do: "CROSS JOIN " + + defp where(%{wheres: wheres} = query, sources) do + boolean(" WHERE ", wheres, sources, query) + end + + defp having(%{havings: havings} = query, sources) do + boolean(" HAVING ", havings, sources, query) + end + + defp group_by(%{group_bys: []}, _sources), do: [] + + defp group_by(%{group_bys: group_bys} = query, sources) do + [ + " GROUP BY " + | intersperse_map(group_bys, ", ", fn + %QueryExpr{expr: expr} -> + intersperse_map(expr, ", ", &Expression.expr(&1, sources, query)) + end) + ] + end + + defp window(%{windows: []}, _sources), do: [] + + defp window(%{windows: windows} = query, sources) do + [ + " WINDOW " + | intersperse_map(windows, ", ", fn {name, %{expr: kw}} -> + [quote_name(name), " AS " | Expression.window_exprs(kw, sources, query)] + end) + ] + end + + defp order_by(%{order_bys: []}, _distinct, _sources), do: [] + + defp order_by(%{order_bys: order_bys} = query, distinct, sources) do + order_bys = Enum.flat_map(order_bys, & &1.expr) + + [ + " ORDER BY " + | intersperse_map( + distinct ++ order_bys, + ", ", + &Expression.order_by_expr(&1, sources, query) + ) + ] + end + + def top(%Query{offset: nil, limit: %QueryExpr{expr: expr}} = query, sources) do + [" TOP ", Expression.expr(expr, sources, query)] + end + + def top(_, _) do + [] + end + + def offset(%Query{offset: nil, limit: nil}, _sources), do: [] + + def offset( + %Query{offset: nil, limit: %QueryExpr{expr: _expr}} = _query, + _sources + ) do + [] + end + + def offset( + %Query{ + offset: %QueryExpr{expr: offset_expr}, + limit: %QueryExpr{expr: limit_expr} + } = query, + sources + ) do + [ + " OFFSET ", + Expression.expr(offset_expr, sources, query), + " ROWS FETCH NEXT ", + Expression.expr(limit_expr, sources, query), + " ROWS ONLY" + ] + end + + def offset(%Query{offset: %QueryExpr{expr: expr}} = query, sources) do + [" OFFSET ", Expression.expr(expr, sources, query), " ROWS"] + end + + defp combinations(%{combinations: combinations}) do + Enum.map(combinations, fn + {:union, query} -> [" UNION (", all(query), ")"] + {:union_all, query} -> [" UNION ALL (", all(query), ")"] + 
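The `combinations/1` clauses begun above (and continued just below) map Ecto's set operations one-to-one onto SQL keywords. Worth noting: SQL Server supports `UNION`, `UNION ALL`, `EXCEPT`, and `INTERSECT`, but has no `EXCEPT ALL`/`INTERSECT ALL`, so those two clauses would generate SQL the server rejects. A hypothetical query exercising the supported path (repo name assumed):

```elixir
import Ecto.Query

# Composes to "... UNION (SELECT ...)" via the :union clause.
supplier_names = from(s in "suppliers", select: s.name)
customer_names = from(c in "customers", select: c.name, union: ^supplier_names)
Repo.all(customer_names)
```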
{:except, query} -> [" EXCEPT (", all(query), ")"]
+      {:except_all, query} -> [" EXCEPT ALL (", all(query), ")"]
+      {:intersect, query} -> [" INTERSECT (", all(query), ")"]
+      {:intersect_all, query} -> [" INTERSECT ALL (", all(query), ")"]
+    end)
+  end
+
+  defp lock(nil), do: []
+  defp lock(lock_clause), do: [?\s | lock_clause]
+
+  defp boolean(_name, [], _sources, _query), do: []
+
+  defp boolean(name, [%{expr: expr, op: op} | query_exprs], sources, query) do
+    [
+      name
+      | Enum.reduce(query_exprs, {op, Expression.paren_expr(expr, sources, query)}, fn
+          %BooleanExpr{expr: expr, op: op}, {op, acc} ->
+            {op, [acc, operator_to_boolean(op), Expression.paren_expr(expr, sources, query)]}
+
+          %BooleanExpr{expr: expr, op: op}, {_, acc} ->
+            {op,
+             [?(, acc, ?), operator_to_boolean(op), Expression.paren_expr(expr, sources, query)]}
+        end)
+        |> elem(1)
+    ]
+  end
+
+  defp operator_to_boolean(:and), do: " AND "
+  defp operator_to_boolean(:or), do: " OR "
+
+  def output(%Ecto.Query{select: nil}, _sources, _), do: []
+
+  def output(
+        %Ecto.Query{select: %{fields: fields}} = query,
+        _sources,
+        operation
+      ),
+      do: [
+        " OUTPUT "
+        | select_fields(fields, {{nil, operation, nil}}, query)
+      ]
+
+  def output([], _), do: []
+
+  def output(returning, operation),
+    do: [
+      " OUTPUT "
+      | Enum.map_join(returning, ", ", fn column ->
+          [operation, ?. | quote_name(column)]
+        end)
+    ]
+
+  defp create_names(%{sources: sources}) do
+    create_names(sources, 0, tuple_size(sources))
+    |> List.to_tuple()
+  end
+
+  defp create_names(sources, pos, limit) when pos < limit do
+    [create_name(sources, pos) | create_names(sources, pos + 1, limit)]
+  end
+
+  defp create_names(_sources, pos, pos) do
+    []
+  end
+
+  defp create_name(sources, pos) do
+    case elem(sources, pos) do
+      {:fragment, _, _} ->
+        {nil, [?f | Integer.to_string(pos)], nil}
+
+      {table, schema, prefix} ->
+        name = [create_alias(table) | Integer.to_string(pos)]
+        {quote_table(prefix, table), name, schema}
+
+      %Ecto.SubQuery{} ->
+        {nil, [?s | Integer.to_string(pos)], nil}
+    end
+  end
+
+  defp create_alias(<<first, _rest::binary>>) when first in ?a..?z when first in ?A..?Z do
+    <<first>>
+  end
+
+  defp create_alias(_) do
+    "t"
+  end
+end
diff --git a/lib/connection/query/expression.ex b/lib/connection/query/expression.ex
new file mode 100644
index 0000000..0311018
--- /dev/null
+++ b/lib/connection/query/expression.ex
@@ -0,0 +1,293 @@
+defmodule MssqlEcto.Connection.Query.Expression do
+  @moduledoc false
+
+  alias MssqlEcto.Connection.Query
+  import MssqlEcto.Connection.Helper
+
+  binary_ops = [
+    ==: " = ",
+    !=: " != ",
+    <=: " <= ",
+    >=: " >= ",
+    <: " < ",
+    >: " > ",
+    +: " + ",
+    -: " - ",
+    *: " * ",
+    /: " / ",
+    and: " AND ",
+    or: " OR ",
+    ilike: " ILIKE ",
+    like: " LIKE "
+  ]
+
+  @binary_ops Keyword.keys(binary_ops)
+
+  Enum.map(binary_ops, fn {op, str} ->
+    defp handle_call(unquote(op), 2), do: {:binary_op, unquote(str)}
+  end)
+
+  defp handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)}
+
+  defp op_to_binary({op, _, [_, _]} = expr, sources, query) when op in @binary_ops do
+    paren_expr(expr, sources, query)
+  end
+
+  defp op_to_binary(expr, sources, query) do
+    expr(expr, sources, query)
+  end
+
+  def paren_expr(false, _sources, _query), do: "(0=1)"
+  def paren_expr(true, _sources, _query), do: "(1=1)"
+
+  def paren_expr(expr, sources, query) do
+    [?(, expr(expr, sources, query), ?)]
+  end
+
+  def expr(%Ecto.SubQuery{query: query}, _sources, _query) do
+    [?(, Query.all(query), ?)]
+  end
+
+  def expr({:^, [], [_]}, _sources, _query) do
+    [??]
+ end + + def expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query) when is_atom(field) do + quote_qualified_name(field, sources, idx) + end + + def expr({:&, _, [idx]}, sources, query) do + {_source, name, _schema} = elem(sources, idx) + + error!( + query, + "Microsoft SQL Server requires a schema module when using selector " <> + "#{inspect(name)} but none was given. " <> + "Please specify a schema or specify exactly which fields from " <> + "#{inspect(name)} you desire" + ) + end + + def expr({:&, _, [idx, fields, _counter]}, sources, query) do + {_, name, schema} = elem(sources, idx) + + if is_nil(schema) and is_nil(fields) do + error!( + query, + "Microsoft SQL Server requires a schema module when using selector " <> + "#{inspect(name)} but none was given. " <> + "Please specify a schema or specify exactly which fields from " <> + "#{inspect(name)} you desire" + ) + end + + intersperse_map(fields, ", ", &[name, ?. | quote_name(&1)]) + end + + def expr({:in, _, [_left, []]}, _sources, _query) do + "0=1" + end + + def expr({:in, _, [left, right]}, sources, query) when is_list(right) do + args = intersperse_map(right, ?,, &expr(&1, sources, query)) + [expr(left, sources, query), " IN (", args, ?)] + end + + def expr({:in, _, [_, {:^, _, [_, 0]}]}, _sources, _query) do + "0=1" + end + + def expr({:in, _, [left, {:^, _, [_ix, length]}]}, sources, query) do + args = + Enum.map(1..length, fn _ -> [??] end) + |> Enum.intersperse(?,) + + [expr(left, sources, query), " IN (", args, ?)] + end + + def expr({:in, _, [left, right]}, sources, query) do + [expr(left, sources, query), " = ANY(", expr(right, sources, query), ?)] + end + + def expr({:is_nil, _, [arg]}, sources, query) do + [expr(arg, sources, query) | " IS NULL"] + end + + def expr({:not, _, [expr]}, sources, query) do + case expr do + {fun, _, _} when fun in @binary_ops -> + ["NOT (", expr(expr, sources, query), ?)] + + _ -> + ["~(", expr(expr, sources, query), ?)] + end + end + + def expr({:fragment, _, [kw]}, _sources, query) + when is_list(kw) or tuple_size(kw) == 3 do + error!( + query, + "Microsoft SQL Server adapter does not support keyword or interpolated fragments" + ) + end + + def expr({:fragment, _, parts}, sources, query) do + Enum.map(parts, fn + {:raw, part} -> part + {:expr, expr} -> expr(expr, sources, query) + end) + |> parens_for_select() + end + + # TODO timestamp and date types? 
is this correct? Note: these clauses still emit Postgres cast and interval syntax ("::timestamp", "::date"); T-SQL needs DATEADD, which the deleted QueryString implementation used.
+  def expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do
+    [
+      expr(datetime, sources, query),
+      "::timestamp + ",
+      interval(count, interval, sources, query)
+    ]
+  end
+
+  def expr({:date_add, _, [date, count, interval]}, sources, query) do
+    [
+      ?(,
+      expr(date, sources, query),
+      "::date + ",
+      interval(count, interval, sources, query) | ")::date"
+    ]
+  end
+
+  def expr({:filter, _, [agg, filter]}, sources, query) do
+    aggregate = expr(agg, sources, query)
+    [aggregate, " FILTER (WHERE ", expr(filter, sources, query), ?)]
+  end
+
+  def expr({:over, _, [agg, name]}, sources, query) when is_atom(name) do
+    aggregate = expr(agg, sources, query)
+    [aggregate, " OVER " | quote_name(name)]
+  end
+
+  def expr({:over, _, [agg, kw]}, sources, query) do
+    aggregate = expr(agg, sources, query)
+    [aggregate, " OVER ", window_exprs(kw, sources, query)]
+  end
+
+  def expr({:{}, _, elems}, sources, query) do
+    [?(, intersperse_map(elems, ?,, &expr(&1, sources, query)), ?)]
+  end
+
+  def expr({:count, _, []}, _sources, _query), do: "count(*)"
+
+  def expr({fun, _, args}, sources, query) when is_atom(fun) and is_list(args) do
+    {modifier, args} =
+      case args do
+        [rest, :distinct] -> {"DISTINCT ", [rest]}
+        _ -> {[], args}
+      end
+
+    case handle_call(fun, length(args)) do
+      {:binary_op, op} ->
+        [left, right] = args
+        [op_to_binary(left, sources, query), op | op_to_binary(right, sources, query)]
+
+      {:fun, fun} ->
+        [fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
+    end
+  end
+
+  def expr(list, sources, query) when is_list(list) do
+    ["ARRAY[", intersperse_map(list, ?,, &expr(&1, sources, query)), ?]]
+  end
+
+  def expr(%Decimal{} = decimal, _sources, _query) do
+    Decimal.to_string(decimal, :normal)
+  end
+
+  def expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query)
+      when is_binary(binary) do
+    ["0x", Base.encode16(binary, case: :lower)]
+  end
+
+  def expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do
+    ["CAST(", expr(other, sources, query), " AS ", tagged_to_db(type) | ")"]
+  end
+
+  def expr(nil, _sources, _query), do: "NULL"
+  def expr(true, _sources, _query), do: "1"
+  def expr(false, _sources, _query), do: "0"
+
+  def expr(literal, _sources, _query) when is_binary(literal) do
+    [?', escape_string(literal), ?']
+  end
+
+  def expr(literal, _sources, _query) when is_integer(literal) do
+    Integer.to_string(literal)
+  end
+
+  def expr(literal, _sources, _query) when is_float(literal) do
+    [Float.to_string(literal)]
+  end
+
+  defp parens_for_select([first_expr | _] = expr) do
+    if is_binary(first_expr) and String.starts_with?(first_expr, ["SELECT", "select"]) do
+      [?(, expr, ?)]
+    else
+      expr
+    end
+  end
+
+  defp interval(count, interval, _sources, _query) when is_integer(count) do
+    ["interval '", String.Chars.Integer.to_string(count), ?\s, interval, ?\']
+  end
+
+  defp interval(count, interval, _sources, _query) when is_float(count) do
+    count = :erlang.float_to_binary(count, [:compact, decimals: 16])
+    ["interval '", count, ?\s, interval, ?\']
+  end
+
+  # TODO: "::numeric" is Postgres cast syntax; T-SQL would need CAST(... AS numeric).
+  defp interval(count, interval, sources, query) do
+    [
+      ?(,
+      expr(count, sources, query),
+      "::numeric * ",
+      interval(1, interval, sources, query),
+      ?)
+    ]
+  end
+
+  defp tagged_to_db({:array, type}), do: [tagged_to_db(type), ?[, ?]]
+  # Integer-ish tags map to plain "int" here, unlike the Postgres adapter,
+  # which casts to the largest possible integer type.
+  defp tagged_to_db(:id), do: "int"
+  defp tagged_to_db(:integer), do: "int"
+  defp tagged_to_db(type), do: ecto_to_db(type)
+
+  def window_exprs(kw, sources, query) do
+    [?(, intersperse_map(kw, ?\s, &window_expr(&1, sources, query)), ?)]
+  end
+
+  defp window_expr({:partition_by, fields}, sources, query) do
+    ["PARTITION BY " | intersperse_map(fields, ", ", &expr(&1, sources, query))]
+  end
+
+  defp window_expr({:order_by, fields}, sources, query) do
+    ["ORDER BY " | intersperse_map(fields, ", ", &order_by_expr(&1, sources, query))]
+  end
+
+  defp window_expr({:frame, {:fragment, _, _} = fragment}, sources, query) do
+    expr(fragment, sources, query)
+  end
+
+  # TODO: NULLS FIRST/LAST is not valid T-SQL; SQL Server has no direct equivalent.
+  def order_by_expr({dir, expr}, sources, query) do
+    str = expr(expr, sources, query)
+
+    case dir do
+      :asc -> str
+      :asc_nulls_last -> [str | " ASC NULLS LAST"]
+      :asc_nulls_first -> [str | " ASC NULLS FIRST"]
+      :desc -> [str | " DESC"]
+      :desc_nulls_last -> [str | " DESC NULLS LAST"]
+      :desc_nulls_first -> [str | " DESC NULLS FIRST"]
+    end
+  end
+end
diff --git a/lib/mssql_ecto.ex b/lib/mssql_ecto.ex
deleted file mode 100644
index c692342..0000000
--- a/lib/mssql_ecto.ex
+++ /dev/null
@@ -1,41 +0,0 @@
-defmodule MssqlEcto do
-  @moduledoc false
-  @behaviour Ecto.Adapter.Storage
-
-  use Ecto.Adapters.SQL, :mssqlex
-
-  alias MssqlEcto.Migration
-  alias MssqlEcto.Storage
-  alias MssqlEcto.Structure
-
-  import MssqlEcto.Type, only: [encode: 2, decode: 2]
-
-  def autogenerate(:binary_id), do: Ecto.UUID.generate()
-  def autogenerate(type), do: super(type)
-
-  def dumpers({:embed, _} = type, _),
-    do: [&Ecto.Adapters.SQL.dump_embed(type, &1)]
-
-  def dumpers(:binary_id, _type), do: []
-  def dumpers(:uuid, _type), do: []
-  def dumpers(ecto_type, type), do: [type, &encode(&1, ecto_type)]
-
-  def loaders({:embed, _} = type, _),
-    do: [&Ecto.Adapters.SQL.load_embed(type, &1)]
-
-  def loaders(ecto_type, type), do: [&decode(&1, ecto_type), type]
-
-  ## Migration
-  def supports_ddl_transaction?, do: Migration.supports_ddl_transaction?()
-
-  ## Storage
-  def storage_up(opts), do: Storage.storage_up(opts)
-  def storage_down(opts), do: Storage.storage_down(opts)
-
-  ## Structure
-  def structure_dump(default, config),
-    do: Structure.structure_dump(default, config)
-
-  def structure_load(default, config),
-    do: Structure.structure_load(default, config)
-end
diff --git a/lib/mssql_ecto/connection.ex b/lib/mssql_ecto/connection.ex
deleted file mode 100644
index 9a3ff31..0000000
--- a/lib/mssql_ecto/connection.ex
+++ /dev/null
@@ -1,203 +0,0 @@
-defmodule MssqlEcto.Connection do
-  alias Mssqlex.Query
-  alias MssqlEcto.Query, as: SQL
-
-  @typedoc "The prepared query which is an SQL command"
-  @type prepared :: String.t()
-
-  @typedoc "The cache query which is a DBConnection Query"
-  @type cached :: map
-
-  @doc """
-  Receives options and returns `DBConnection` supervisor child
-  specification.
-  """
-  @spec child_spec(options :: Keyword.t()) :: {module, Keyword.t()}
-  def child_spec(opts) do
-    DBConnection.child_spec(Mssqlex.Protocol, opts)
-  end
-
-  @doc """
-  Prepares and executes the given query with `DBConnection`.
- """ - @spec prepare_execute( - connection :: DBConnection.t(), - name :: String.t(), - prepared, - params :: [term], - options :: Keyword.t() - ) :: {:ok, query :: map, term} | {:error, Exception.t()} - def prepare_execute(conn, name, prepared_query, params, options) do - statement = sanitise_query(prepared_query) - ordered_params = order_params(prepared_query, params) - - case DBConnection.prepare_execute( - conn, - %Query{name: name, statement: statement}, - ordered_params, - options - ) do - {:ok, query, result} -> - {:ok, %{query | statement: prepared_query}, - process_rows(result, options)} - - {:error, %Mssqlex.Error{}} = error -> - if is_erlang_odbc_no_data_found_bug?(error, prepared_query) do - {:ok, %Query{name: "", statement: prepared_query}, - %{num_rows: 0, rows: []}} - else - error - end - - {:error, error} -> - raise error - end - end - - @doc """ - Executes the given prepared query with `DBConnection`. - """ - @spec execute( - connection :: DBConnection.t(), - prepared_query :: prepared, - params :: [term], - options :: Keyword.t() - ) :: {:ok, term} | {:error, Exception.t()} - @spec execute( - connection :: DBConnection.t(), - prepared_query :: cached, - params :: [term], - options :: Keyword.t() - ) :: {:ok, term} | {:error | :reset, Exception.t()} - def execute(conn, %Query{} = query, params, options) do - ordered_params = - query.statement - |> IO.iodata_to_binary() - |> order_params(params) - - sanitised_query = sanitise_query(query.statement) - query = Map.put(query, :statement, sanitised_query) - - case DBConnection.prepare_execute(conn, query, ordered_params, options) do - {:ok, _query, result} -> - {:ok, process_rows(result, options)} - - {:error, %Mssqlex.Error{}} = error -> - if is_erlang_odbc_no_data_found_bug?(error, query.statement) do - {:ok, %{num_rows: 0, rows: []}} - else - error - end - - {:error, error} -> - raise error - end - end - - def execute(conn, statement, params, options) do - execute(conn, %Query{name: "", statement: statement}, params, options) - end - - defp order_params(query, params) do - sanitised = - Regex.replace( - ~r/(([^\\]|^))["'].*?[^\\]['"]/, - IO.iodata_to_binary(query), - "\\g{1}" - ) - - ordering = - Regex.scan(~r/\?([0-9]+)/, sanitised) - |> Enum.map(fn [_, x] -> String.to_integer(x) end) - - if length(ordering) != length(params) do - raise "\nError: number of params received (#{length(params)}) does not match expected (#{ - length(ordering) - })" - end - - ordered_params = - ordering - |> Enum.reduce([], fn ix, acc -> [Enum.at(params, ix - 1) | acc] end) - |> Enum.reverse() - - case ordered_params do - [] -> params - _ -> ordered_params - end - end - - defp sanitise_query(query) do - query - |> IO.iodata_to_binary() - |> String.replace( - ~r/(\?([0-9]+))(?=(?:[^\\"']|[\\"'][^\\"']*[\\"'])*$)/, - "?" - ) - end - - defp is_erlang_odbc_no_data_found_bug?({:error, error}, statement) do - is_dml = - statement - |> IO.iodata_to_binary() - |> (fn string -> - String.starts_with?(string, "INSERT") || - String.starts_with?(string, "DELETE") || - String.starts_with?(string, "UPDATE") - end).() - - is_dml and error.message =~ "No SQL-driver information available." - end - - defp process_rows(result, options) do - decoder = options[:decode_mapper] || fn x -> x end - - Map.update!(result, :rows, fn row -> - unless is_nil(row), do: Enum.map(row, decoder) - end) - end - - @doc """ - Receives the exception returned by `query/4`. 
- The constraints are in the keyword list and must return the - constraint type, like `:unique`, and the constraint name as - a string, for example: - [unique: "posts_title_index"] - Must return an empty list if the error does not come - from any constraint. - """ - @spec to_constraints(exception :: Exception.t()) :: Keyword.t() - def to_constraints(%Mssqlex.Error{} = error), do: error.constraint_violations - - @doc """ - Returns a stream that prepares and executes the given query with - `DBConnection`. - """ - @spec stream( - connection :: DBConnection.conn(), - prepared_query :: prepared, - params :: [term], - options :: Keyword.t() - ) :: Enum.t() - def stream(_conn, _prepared, _params, _options) do - raise("not implemented") - end - - ## Queries - def all(query), do: SQL.all(query) - def update_all(query, prefix \\ nil), do: SQL.update_all(query, prefix) - @doc false - def delete_all(query), do: SQL.delete_all(query) - - def insert(prefix, table, header, rows, on_conflict, returning), - do: SQL.insert(prefix, table, header, rows, on_conflict, returning) - - def update(prefix, table, fields, filters, returning), - do: SQL.update(prefix, table, fields, filters, returning) - - def delete(prefix, table, filters, returning), - do: SQL.delete(prefix, table, filters, returning) - - ## Migration - def execute_ddl(command), do: MssqlEcto.Migration.execute_ddl(command) -end diff --git a/lib/mssql_ecto/query.ex b/lib/mssql_ecto/query.ex deleted file mode 100644 index 654edad..0000000 --- a/lib/mssql_ecto/query.ex +++ /dev/null @@ -1,241 +0,0 @@ -defmodule MssqlEcto.Query do - alias MssqlEcto.QueryString - - import MssqlEcto.Helpers - - @doc """ - Receives a query and must return a SELECT query. - """ - @spec all(query :: Ecto.Query.t()) :: String.t() - def all(query) do - sources = QueryString.create_names(query) - - {select_distinct, order_by_distinct} = - QueryString.distinct(query.distinct, sources, query) - - from = QueryString.from(query, sources) - select = QueryString.select(query, select_distinct, sources) - join = QueryString.join(query, sources) - where = QueryString.where(query, sources) - group_by = QueryString.group_by(query, sources) - having = QueryString.having(query, sources) - order_by = QueryString.order_by(query, order_by_distinct, sources) - offset = QueryString.offset(query, sources) - lock = QueryString.lock(query.lock) - - IO.iodata_to_binary([ - select, - from, - join, - where, - group_by, - having, - order_by, - offset | lock - ]) - end - - @doc """ - Receives a query and values to update and must return an UPDATE query. - """ - @spec update_all(query :: Ecto.Query.t()) :: String.t() - def update_all(%{from: from} = query, prefix \\ nil) do - sources = QueryString.create_names(query) - {from, name} = get_source(query, sources, 0, from) - - prefix = prefix || ["UPDATE ", name | " SET "] - table_alias = [" FROM ", from, " AS ", name] - fields = QueryString.update_fields(query, sources) - join = QueryString.join(query, sources) - where = QueryString.where(query, sources) - - IO.iodata_to_binary([ - prefix, - fields, - returning(query, sources, "INSERTED"), - table_alias, - join, - where - ]) - end - - @doc """ - Receives a query and must return a DELETE query. 
- """ - @spec delete_all(query :: Ecto.Query.t()) :: String.t() - def delete_all(%{from: from} = query) do - sources = QueryString.create_names(query) - {from, name} = get_source(query, sources, 0, from) - - join = QueryString.join(query, sources) - where = QueryString.where(query, sources) - - IO.iodata_to_binary([ - "DELETE ", - name, - returning(query, sources, "DELETED"), - " FROM ", - from, - " AS ", - name, - join, - where - ]) - end - - @doc """ - Returns an INSERT for the given `rows` in `table` returning - the given `returning`. - """ - @spec insert( - prefix :: String.t(), - table :: String.t(), - header :: [atom], - rows :: [[atom | nil]], - on_conflict :: Ecto.Adapter.on_conflict(), - returning :: [atom] - ) :: String.t() - def insert(prefix, table, header, rows, on_conflict, returning) do - included_fields = - header - |> Enum.filter(fn value -> Enum.any?(rows, fn row -> value in row end) end) - - if included_fields === [] do - [ - "INSERT INTO ", - quote_table(prefix, table), - returning(returning, "INSERTED"), - " DEFAULT VALUES ; " - ] - |> List.duplicate(length(rows)) - |> IO.iodata_to_binary() - else - included_rows = - Enum.map(rows, fn row -> - row - |> Enum.zip(header) - |> Enum.filter(fn {_row, col} -> col in included_fields end) - |> Enum.map(fn {row, _col} -> row end) - end) - - fields = intersperse_map(included_fields, ?,, "e_name/1) - - IO.iodata_to_binary([ - "INSERT INTO ", - quote_table(prefix, table), - " (", - fields, - ")", - returning(returning, "INSERTED"), - " VALUES ", - insert_all(included_rows, 1), - on_conflict(on_conflict, included_fields) - ]) - end - end - - defp on_conflict({:raise, _, []}, _header) do - [] - end - - defp on_conflict(_, _header) do - error!(nil, ":on_conflict options other than :raise are not yet supported") - end - - defp insert_all(rows, counter) do - intersperse_reduce(rows, ?,, counter, fn row, counter -> - {row, counter} = insert_each(row, counter) - {[?(, row, ?)], counter} - end) - |> elem(0) - end - - defp insert_each(values, counter) do - intersperse_reduce(values, ?,, counter, fn - nil, counter -> - {"DEFAULT", counter} - - _, counter -> - {[?? | Integer.to_string(counter)], counter + 1} - end) - end - - defp returning(%Ecto.Query{select: nil}, _sources, _), do: [] - - defp returning( - %Ecto.Query{select: %{fields: fields}} = query, - _sources, - operation - ), - do: [ - " OUTPUT " - | QueryString.select_fields(fields, {{nil, operation, nil}}, query) - ] - - defp returning([], _), do: [] - - defp returning(returning, operation), - do: [ - " OUTPUT " - | Enum.map_join(returning, ", ", fn column -> - [operation, ?. | quote_name(column)] - end) - ] - - @doc """ - Returns an UPDATE for the given `fields` in `table` filtered by - `filters` returning the given `returning`. - """ - @spec update( - prefix :: String.t(), - table :: String.t(), - fields :: [atom], - filters :: [atom], - returning :: [atom] - ) :: String.t() - def update(prefix, table, fields, filters, returning) do - {fields, count} = - intersperse_reduce(fields, ", ", 1, fn field, acc -> - {[quote_name(field), " = ?" | Integer.to_string(acc)], acc + 1} - end) - - {filters, _count} = - intersperse_reduce(filters, " AND ", count, fn field, acc -> - {[quote_name(field), " = ?" | Integer.to_string(acc)], acc + 1} - end) - - IO.iodata_to_binary([ - "UPDATE ", - quote_table(prefix, table), - " SET ", - fields, - returning(returning, "INSERTED"), - " WHERE ", - filters - ]) - end - - @doc """ - Returns a DELETE for the `filters` returning the given `returning`. 
- """ - @spec delete( - prefix :: String.t(), - table :: String.t(), - filters :: [atom], - returning :: [atom] - ) :: String.t() - def delete(prefix, table, filters, returning) do - {filters, _} = - intersperse_reduce(filters, " AND ", 1, fn field, acc -> - {[quote_name(field), " = ?", Integer.to_string(acc)], acc + 1} - end) - - IO.iodata_to_binary([ - "DELETE FROM ", - quote_table(prefix, table), - returning(returning, "DELETED"), - " WHERE ", - filters - ]) - end -end diff --git a/lib/mssql_ecto/query_string.ex b/lib/mssql_ecto/query_string.ex deleted file mode 100644 index 16abc98..0000000 --- a/lib/mssql_ecto/query_string.ex +++ /dev/null @@ -1,547 +0,0 @@ -defmodule MssqlEcto.QueryString do - alias Ecto.Query - alias Ecto.Query.{BooleanExpr, JoinExpr, QueryExpr} - alias MssqlEcto.Connection - alias MssqlEcto.Helpers - - binary_ops = [ - ==: " = ", - !=: " != ", - <=: " <= ", - >=: " >= ", - <: " < ", - >: " > ", - and: " AND ", - or: " OR ", - ilike: " ILIKE ", - like: " LIKE ", - in: " IN ", - is_nil: " WHERE " - ] - - @binary_ops Keyword.keys(binary_ops) - - Enum.map(binary_ops, fn {op, str} -> - def handle_call(unquote(op), 2), do: {:binary_op, unquote(str)} - end) - - def handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)} - - def select( - %Query{select: %{fields: fields}} = query, - select_distinct, - sources - ) do - [ - "SELECT", - top(query, sources), - select_distinct, - ?\s | select_fields(fields, sources, query) - ] - end - - def top(%Query{offset: nil, limit: %QueryExpr{expr: expr}} = query, sources) do - [" TOP ", expr(expr, sources, query)] - end - - def top(_, _) do - [] - end - - def select_fields([], _sources, _query), do: "'TRUE'" - - def select_fields(fields, sources, query) do - Helpers.intersperse_map(fields, ", ", fn - {key, value} -> - [expr(value, sources, query), " AS " | Helpers.quote_name(key)] - - value -> - expr(value, sources, query) - end) - end - - def distinct(nil, _, _), do: {[], []} - def distinct(%QueryExpr{expr: []}, _, _), do: {[], []} - def distinct(%QueryExpr{expr: true}, _, _), do: {" DISTINCT", []} - def distinct(%QueryExpr{expr: false}, _, _), do: {[], []} - - def distinct(%QueryExpr{expr: exprs}, sources, query) do - {[ - " DISTINCT ON (", - Helpers.intersperse_map(exprs, ", ", fn {_, expr} -> - expr(expr, sources, query) - end), - ?) - ], exprs} - end - - def from(%{from: from} = query, sources) do - {from, name} = Helpers.get_source(query, sources, 0, from) - [" FROM ", from, " AS " | name] - end - - def update_fields(%Query{updates: updates} = query, sources) do - for( - %{expr: expr} <- updates, - {op, kw} <- expr, - {key, value} <- kw, - do: update_op(op, key, value, sources, query) - ) - |> Enum.intersperse(", ") - end - - def update_op(:set, key, value, sources, query) do - [Helpers.quote_name(key), " = " | expr(value, sources, query)] - end - - def update_op(:inc, key, value, sources, query) do - [ - Helpers.quote_name(key), - " = ", - Helpers.quote_qualified_name(key, sources, 0), - " + " - | expr(value, sources, query) - ] - end - - def update_op(:push, key, value, sources, query) do - [ - Helpers.quote_name(key), - " = array_append(", - Helpers.quote_qualified_name(key, sources, 0), - ", ", - expr(value, sources, query), - ?) - ] - end - - def update_op(:pull, key, value, sources, query) do - [ - Helpers.quote_name(key), - " = array_remove(", - Helpers.quote_qualified_name(key, sources, 0), - ", ", - expr(value, sources, query), - ?) 
- ] - end - - def update_op(command, _key, _value, _sources, query) do - Helpers.error!( - query, - "Unknown update operation #{inspect(command)} for Microsoft SQL Server" - ) - end - - def using_join(%Query{joins: []}, _kind, _prefix, _sources), do: {[], []} - - def using_join(%Query{joins: joins} = query, kind, prefix, sources) do - froms = - Helpers.intersperse_map(joins, ", ", fn - %JoinExpr{qual: :inner, ix: ix, source: source} -> - {join, name} = Helpers.get_source(query, sources, ix, source) - [join, " AS " | name] - - %JoinExpr{qual: qual} -> - Helpers.error!( - query, - "Microsoft SQL Server supports only inner joins on #{kind}, got: `#{ - qual - }`" - ) - end) - - wheres = - for %JoinExpr{on: %QueryExpr{expr: value} = expr} <- joins, - value != true, - do: expr |> Map.put(:__struct__, BooleanExpr) |> Map.put(:op, :and) - - {[?\s, prefix, ?\s | froms], wheres} - end - - def join(%Query{joins: []}, _sources), do: [] - - def join(%Query{joins: joins} = query, sources) do - [ - ?\s - | Helpers.intersperse_map(joins, ?\s, fn %JoinExpr{ - on: %QueryExpr{expr: expr}, - qual: qual, - ix: ix, - source: source - } -> - {join, name} = Helpers.get_source(query, sources, ix, source) - - [ - join_qual(qual), - join, - " AS ", - name, - " ON " | paren_expr(expr, sources, query) - ] - end) - ] - end - - def join_qual(:inner), do: "INNER JOIN " - def join_qual(:inner_lateral), do: "INNER JOIN LATERAL " - def join_qual(:left), do: "LEFT OUTER JOIN " - def join_qual(:left_lateral), do: "LEFT OUTER JOIN LATERAL " - def join_qual(:right), do: "RIGHT OUTER JOIN " - def join_qual(:full), do: "FULL OUTER JOIN " - def join_qual(:cross), do: "CROSS JOIN " - - def where(%Query{wheres: wheres} = query, sources) do - boolean(" WHERE ", wheres, sources, query) - end - - def having(%Query{havings: havings} = query, sources) do - boolean(" HAVING ", havings, sources, query) - end - - def group_by(%Query{group_bys: []}, _sources), do: [] - - def group_by(%Query{group_bys: group_bys} = query, sources) do - [ - " GROUP BY " - | Helpers.intersperse_map(group_bys, ", ", fn %QueryExpr{expr: expr} -> - Helpers.intersperse_map(expr, ", ", &expr(&1, sources, query)) - end) - ] - end - - def order_by(%Query{order_bys: []}, _distinct, _sources), do: [] - - def order_by(%Query{order_bys: order_bys} = query, distinct, sources) do - order_bys = Enum.flat_map(order_bys, & &1.expr) - - [ - " ORDER BY " - | Helpers.intersperse_map( - distinct ++ order_bys, - ", ", - &order_by_expr(&1, sources, query) - ) - ] - end - - def order_by_expr({dir, expr}, sources, query) do - str = expr(expr, sources, query) - - case dir do - :asc -> str - :desc -> [str | " DESC"] - end - end - - def offset(%Query{offset: nil, limit: nil}, _sources), do: [] - - def offset( - %Query{offset: nil, limit: %QueryExpr{expr: _expr}} = _query, - _sources - ) do - [] - end - - def offset( - %Query{ - offset: %QueryExpr{expr: offset_expr}, - limit: %QueryExpr{expr: limit_expr} - } = query, - sources - ) do - [ - " OFFSET ", - expr(offset_expr, sources, query), - " ROWS FETCH NEXT ", - expr(limit_expr, sources, query), - " ROWS ONLY" - ] - end - - def offset(%Query{offset: %QueryExpr{expr: expr}} = query, sources) do - [" OFFSET ", expr(expr, sources, query), " ROWS"] - end - - def lock(nil), do: [] - def lock(lock_clause), do: [?\s | lock_clause] - - def boolean(_name, [], _sources, _query), do: [] - - def boolean(name, [%{expr: expr, op: op} | query_exprs], sources, query) do - [ - name - | Enum.reduce(query_exprs, {op, paren_expr(expr, sources, query)}, fn - 
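Both this old builder and its replacement emit SQL Server's paging idiom: a bare `limit` becomes `TOP`, while `offset` plus `limit` becomes `OFFSET ... ROWS FETCH NEXT ... ROWS ONLY`. A sketch of the correspondence (schema name hypothetical; output abbreviated and illustrative; T-SQL additionally requires an ORDER BY for OFFSET/FETCH to be valid):

```elixir
import Ecto.Query

from(p in Post, limit: 10)
# => SELECT TOP 10 ... FROM "posts" AS p0

from(p in Post, order_by: p.id, offset: 20, limit: 10)
# => SELECT ... ORDER BY p0."id" OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY
```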
%BooleanExpr{expr: expr, op: op}, {op, acc} -> - {op, - [acc, operator_to_boolean(op), paren_expr(expr, sources, query)]} - - %BooleanExpr{expr: expr, op: op}, {_, acc} -> - {op, - [ - ?(, - acc, - ?), - operator_to_boolean(op), - paren_expr(expr, sources, query) - ]} - end) - |> elem(1) - ] - end - - def operator_to_boolean(:and), do: " AND " - def operator_to_boolean(:or), do: " OR " - - def paren_expr(false, _sources, _query), do: "(0=1)" - def paren_expr(true, _sources, _query), do: "(1=1)" - - def paren_expr(expr, sources, query) do - [?(, expr(expr, sources, query), ?)] - end - - def expr({_type, [literal]}, sources, query) do - expr(literal, sources, query) - end - - def expr({:^, [], [ix]}, _sources, _query) do - [??, Integer.to_string(ix + 1)] - end - - def expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query) - when is_atom(field) do - Helpers.quote_qualified_name(field, sources, idx) - end - - def expr({:&, _, [idx]}, sources, query) do - {_source, name, _schema} = elem(sources, idx) - - Helpers.error!( - query, - "Microsoft SQL Server requires a schema module when using selector " <> - "#{inspect(name)} but none was given. " <> - "Please specify a schema or specify exactly which fields from " <> - "#{inspect(name)} you desire" - ) - end - - def expr({:&, _, [idx, fields, _counter]}, sources, query) do - {_, name, schema} = elem(sources, idx) - - if is_nil(schema) and is_nil(fields) do - Helpers.error!( - query, - "Microsoft SQL Server requires a schema module when using selector " <> - "#{inspect(name)} but none was given. " <> - "Please specify a schema or specify exactly which fields from " <> - "#{inspect(name)} you desire" - ) - end - - Helpers.intersperse_map(fields, ", ", &[name, ?. | Helpers.quote_name(&1)]) - end - - def expr({:in, _, [_left, []]}, _sources, _query) do - "0=1" - end - - def expr({:in, _, [left, right]}, sources, query) when is_list(right) do - args = Helpers.intersperse_map(right, ?,, &expr(&1, sources, query)) - [expr(left, sources, query), " IN (", args, ?)] - end - - def expr({:in, _, [_, {:^, _, [_, 0]}]}, _sources, _query) do - "0=1" - end - - def expr({:in, _, [left, {:^, _, [ix, length]}]}, sources, query) do - args = - Enum.map((ix + 1)..(ix + length), fn i -> [??, to_string(i)] end) - |> Enum.intersperse(?,) - - [expr(left, sources, query), " IN (", args, ?)] - end - - def expr({:in, _, [left, right]}, sources, query) do - [expr(left, sources, query), " = ANY(", expr(right, sources, query), ?)] - end - - def expr({:is_nil, _, [arg]}, sources, query) do - [expr(arg, sources, query) | " IS NULL"] - end - - def expr({:not, _, [expr]}, sources, query) do - case expr do - {fun, _, _} when fun in @binary_ops -> - ["NOT (", expr(expr, sources, query), ?)] - - _ -> - ["~(", expr(expr, sources, query), ?)] - end - end - - def expr(%Ecto.SubQuery{query: query} = subquery, _sources, _query) do - if Map.has_key?(subquery, :fields) do - query.select.fields - |> put_in(subquery.fields) - |> Connection.all() - else - Connection.all(query) - end - end - - def expr({:fragment, _, [kw]}, _sources, query) - when is_list(kw) or tuple_size(kw) == 3 do - Helpers.error!( - query, - "Microsoft SQL Server adapter does not support keyword or interpolated fragments" - ) - end - - def expr({:fragment, _, parts}, sources, query) do - Enum.map(parts, fn - {:raw, part} -> part - {:expr, expr} -> expr(expr, sources, query) - end) - end - - def expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do - [ - "CAST(DATEADD(", - interval, - ",", - 
expr(count, sources, query), - ",", - expr(datetime, sources, query) | ") AS DATETIME2)" - ] - end - - def expr({:date_add, _, [date, count, interval]}, sources, query) do - [ - "CAST(DATEADD(", - interval, - ",", - expr(count, sources, query), - ",", - expr(date, sources, query) | ") AS DATE)" - ] - end - - def expr({fun, _, args}, sources, query) - when is_atom(fun) and is_list(args) do - {modifier, args} = - case args do - [rest, :distinct] -> {"DISTINCT ", [rest]} - _ -> {[], args} - end - - case handle_call(fun, length(args)) do - {:binary_op, op} -> - [left, right] = args - - [ - op_to_binary(left, sources, query), - op | op_to_binary(right, sources, query) - ] - - {:fun, fun} -> - [ - fun, - ?(, - modifier, - Helpers.intersperse_map(args, ", ", &expr(&1, sources, query)), - ?) - ] - end - end - - def expr(list, sources, query) when is_list(list) do - ["ARRAY[", Helpers.intersperse_map(list, ?,, &expr(&1, sources, query)), ?]] - end - - def expr(%Decimal{} = decimal, _sources, _query) do - Decimal.to_string(decimal, :normal) - end - - def expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query) - when is_binary(binary) do - ["0x", Base.encode16(binary, case: :lower)] - end - - def expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do - [ - "CAST(", - expr(other, sources, query), - " AS ", - Helpers.ecto_to_db(type), - ")" - ] - end - - def expr(nil, _sources, _query), do: "NULL" - def expr(true, _sources, _query), do: "1" - def expr(false, _sources, _query), do: "0" - - def expr(literal, _sources, _query) when is_binary(literal) do - [?\', Helpers.escape_string(literal), ?\'] - end - - def expr(literal, _sources, _query) when is_integer(literal) do - Integer.to_string(literal) - end - - def expr(literal, _sources, _query) when is_float(literal) do - Float.to_string(literal) - end - - def interval(count, _interval, sources, query) do - [expr(count, sources, query)] - end - - def op_to_binary({op, _, [_, _]} = expr, sources, query) - when op in @binary_ops do - paren_expr(expr, sources, query) - end - - def op_to_binary(expr, sources, query) do - expr(expr, sources, query) - end - - def returning(%Query{select: nil}, _sources), do: [] - - def returning(%Query{select: %{fields: fields}} = query, sources), - do: [" RETURNING " | select_fields(fields, sources, query)] - - def returning([]), do: [] - - def returning(returning), - do: [ - " RETURNING " - | Helpers.intersperse_map(returning, ", ", &Helpers.quote_name/1) - ] - - def create_names(%{prefix: prefix, sources: sources}) do - create_names(prefix, sources, 0, tuple_size(sources)) |> List.to_tuple() - end - - def create_names(prefix, sources, pos, limit) when pos < limit do - current = - case elem(sources, pos) do - {table, schema} -> - name = [String.first(table) | Integer.to_string(pos)] - {Helpers.quote_table(prefix, table), name, schema} - - {:fragment, _, _} -> - {nil, [?f | Integer.to_string(pos)], nil} - - %Ecto.SubQuery{} -> - {nil, [?s | Integer.to_string(pos)], nil} - end - - [current | create_names(prefix, sources, pos + 1, limit)] - end - - def create_names(_prefix, _sources, pos, pos) do - [] - end -end diff --git a/lib/mssql_ecto/storage.ex b/lib/mssql_ecto/storage.ex deleted file mode 100644 index 17b9bfb..0000000 --- a/lib/mssql_ecto/storage.ex +++ /dev/null @@ -1,93 +0,0 @@ -defmodule MssqlEcto.Storage do - @behaviour Ecto.Adapter.Storage - - def storage_up(opts) do - database = - Keyword.fetch!(opts, :database) || - raise ":database is nil in repository configuration" - - opts = 
Keyword.put(opts, :database, nil) - - command = - ~s[CREATE DATABASE "#{database}"] - |> concat_if(opts[:collation], &"COLLATE '#{&1}'") - |> concat_if(opts[:template], &"TEMPLATE=#{&1}") - |> concat_if(opts[:lc_ctype], &"LC_CTYPE='#{&1}'") - |> concat_if(opts[:lc_collate], &"LC_COLLATE='#{&1}'") - - case run_query(command, opts) do - {:ok, _} -> - :ok - - {:error, %{odbc_code: :database_already_exists}} -> - {:error, :already_up} - - {:error, error} -> - {:error, Exception.message(error)} - end - end - - defp concat_if(content, nil, _fun), do: content - defp concat_if(content, value, fun), do: content <> " " <> fun.(value) - - @doc false - def storage_down(opts) do - database = - Keyword.fetch!(opts, :database) || - raise ":database is nil in repository configuration" - - command = ~s[DROP DATABASE "#{database}"] - opts = Keyword.put(opts, :database, nil) - - case run_query(command, opts) do - {:ok, _} -> - :ok - - {:error, %{odbc_code: :base_table_or_view_not_found}} -> - {:error, :already_down} - - {:error, error} -> - {:error, Exception.message(error)} - end - end - - defp run_query(sql, opts) do - {:ok, _} = Application.ensure_all_started(:mssqlex) - - opts = - opts - |> Keyword.drop([:name, :log]) - |> Keyword.put(:pool, DBConnection.Connection) - |> Keyword.put(:backoff_type, :stop) - - {:ok, pid} = Task.Supervisor.start_link() - - task = - Task.Supervisor.async_nolink(pid, fn -> - {:ok, conn} = DBConnection.start_link(Mssqlex.Protocol, opts) - value = MssqlEcto.Connection.execute(conn, sql, [], opts) - GenServer.stop(conn) - value - end) - - timeout = Keyword.get(opts, :timeout, 15_000) - - case Task.yield(task, timeout) || Task.shutdown(task) do - {:ok, {:ok, result}} -> - {:ok, result} - - {:ok, {:error, error}} -> - {:error, error} - - {:exit, {%{__struct__: struct} = error, _}} - when struct in [DBConnection.Error] -> - {:error, error} - - {:exit, reason} -> - {:error, RuntimeError.exception(Exception.format_exit(reason))} - - nil -> - {:error, RuntimeError.exception("command timed out")} - end - end -end diff --git a/lib/mssql_ecto/structure.ex b/lib/mssql_ecto/structure.ex deleted file mode 100644 index cfd5d2a..0000000 --- a/lib/mssql_ecto/structure.ex +++ /dev/null @@ -1,23 +0,0 @@ -defmodule MssqlEcto.Structure do - @behaviour Ecto.Adapter.Structure - - def structure_dump(_default, _config) do - # table = config[:migration_source] || "schema_migrations" - - raise "not implemented" - # with {:ok, versions} <- select_versions(table, config), - # {:ok, path} <- pg_dump(default, config), - # do: append_versions(table, versions, path) - end - - def structure_load(_default, _config) do - # path = config[:dump_path] || Path.join(default, "structure.sql") - - raise "not implemented" - - # case run_with_cmd("psql", config, ["--quiet", "--file", path, config[:database]]) do - # {_output, 0} -> {:ok, path} - # {output, _} -> {:error, output} - # end - end -end diff --git a/lib/mssql_ecto/type.ex b/lib/mssql_ecto/type.ex deleted file mode 100644 index 8920e41..0000000 --- a/lib/mssql_ecto/type.ex +++ /dev/null @@ -1,67 +0,0 @@ -defmodule MssqlEcto.Type do - @int_types [:bigint, :integer, :id, :serial] - @decimal_types [:numeric, :decimal] - - def encode(value, :bigint) do - {:ok, to_string(value)} - end - - def encode(value, :binary_id) when is_binary(value) do - Ecto.UUID.load(value) - end - - def encode(value, :decimal) do - try do - value - |> Decimal.to_integer() - |> decode(:integer) - rescue - _e in FunctionClauseError -> - {:ok, value} - end - end - - def encode(value, _type) 
do - {:ok, value} - end - - def decode(value, type) - when type in @int_types and is_binary(value) do - case Integer.parse(value) do - {int, _} -> {:ok, int} - :error -> {:error, "Not an integer id"} - end - end - - def decode(value, type) - when type in [:float] do - cond do - Decimal.decimal?(value) -> {:ok, Decimal.to_float(value)} - true -> {:ok, value} - end - end - - def decode(value, type) - when type in @decimal_types and is_binary(value) do - Decimal.parse(value) - end - - def decode(value, :uuid) do - Ecto.UUID.dump(value) - end - - def decode({date, {h, m, s}}, type) - when type in [:utc_datetime, :naive_datetime] do - {:ok, {date, {h, m, s, 0}}} - end - - def decode(value, type) - when type in [:date] and is_binary(value) do - result = value |> Date.from_iso8601!() |> Date.to_erl() - {:ok, result} - end - - def decode(value, _type) do - {:ok, value} - end -end diff --git a/mix.exs b/mix.exs index 9a1baf8..f07e40d 100644 --- a/mix.exs +++ b/mix.exs @@ -1,12 +1,12 @@ defmodule MssqlEcto.Mixfile do use Mix.Project - def project do + def project() do [ app: :mssql_ecto, - version: "1.2.0", + version: "2.0.0-beta.0", description: "Ecto Adapter for Microsoft SQL Server. Using Mssqlex.", - elixir: "~> 1.6", + elixir: "~> 1.8", build_embedded: Mix.env() == :prod, start_permanent: Mix.env() == :prod, deps: deps(), @@ -25,21 +25,33 @@ defmodule MssqlEcto.Mixfile do ] end - def application do + def application() do [extra_applications: [:logger]] end - defp deps do + defp deps() do [ - {:mssqlex, "~> 1.1.0"}, - {:ecto, "~> 2.2.0"}, - {:ex_doc, "~> 0.15", only: :dev, runtime: false}, + mssqlex_path(), + {:ecto_sql, "~> 3.2"}, + {:db_connection, "~> 2.1"}, + + # tooling + {:ex_doc, "~> 0.19", only: :dev, runtime: false}, + {:dialyxir, "~> 1.0.0-rc.6", only: [:dev], runtime: false}, {:excoveralls, "~> 0.6", only: :test}, {:inch_ex, "~> 0.5", only: :docs} ] end - defp package do + defp mssqlex_path() do + if path = System.get_env("MSSQLEX_PATH") do + {:mssqlex, path: path} + else + {:mssqlex, "2.0.0-beta.0"} + end + end + + defp package() do [ name: :mssql_ecto, files: ["lib", "mix.exs", "README.md", "LICENSE"], diff --git a/mix.lock b/mix.lock index ad4df63..d49cb56 100644 --- a/mix.lock +++ b/mix.lock @@ -1,22 +1,29 @@ %{ - "certifi": {:hex, :certifi, "1.2.1", "c3904f192bd5284e5b13f20db3ceac9626e14eeacfbb492e19583cf0e37b22be", [:rebar3], []}, - "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], []}, - "db_connection": {:hex, :db_connection, "1.1.3", "89b30ca1ef0a3b469b1c779579590688561d586694a3ce8792985d4d7e575a61", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, repo: "hexpm", optional: true]}], "hexpm"}, - "decimal": {:hex, :decimal, "1.5.0", "b0433a36d0e2430e3d50291b1c65f53c37d56f83665b43d79963684865beab68", [:mix], [], "hexpm"}, - "earmark": {:hex, :earmark, "1.2.2", "f718159d6b65068e8daeef709ccddae5f7fdc770707d82e7d126f584cd925b74", [:mix], []}, - "ecto": {:hex, :ecto, "2.2.8", "a4463c0928b970f2cee722cd29aaac154e866a15882c5737e0038bbfcf03ec2c", [:mix], [{:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: true]}, {:decimal, "~> 1.2", [hex: :decimal, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.8.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: true]}, {:poolboy, "~> 1.5", 
[hex: :poolboy, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.13.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, repo: "hexpm", optional: true]}], "hexpm"}, - "ex_doc": {:hex, :ex_doc, "0.16.2", "3b3e210ebcd85a7c76b4e73f85c5640c011d2a0b2f06dcdf5acdb2ae904e5084", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, optional: false]}]}, - "excoveralls": {:hex, :excoveralls, "0.7.1", "3dd659db19c290692b5e2c4a2365ae6d4488091a1ba58f62dcbdaa0c03da5491", [:mix], [{:exjsx, ">= 3.0.0", [hex: :exjsx, optional: false]}, {:hackney, ">= 0.12.0", [hex: :hackney, optional: false]}]}, - "exjsx": {:hex, :exjsx, "4.0.0", "60548841e0212df401e38e63c0078ec57b33e7ea49b032c796ccad8cde794b5c", [:mix], [{:jsx, "~> 2.8.0", [hex: :jsx, optional: false]}]}, - "hackney": {:hex, :hackney, "1.8.6", "21a725db3569b3fb11a6af17d5c5f654052ce9624219f1317e8639183de4a423", [:rebar3], [{:certifi, "1.2.1", [hex: :certifi, optional: false]}, {:idna, "5.0.2", [hex: :idna, optional: false]}, {:metrics, "1.0.1", [hex: :metrics, optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, optional: false]}, {:ssl_verify_fun, "1.1.1", [hex: :ssl_verify_fun, optional: false]}]}, - "idna": {:hex, :idna, "5.0.2", "ac203208ada855d95dc591a764b6e87259cb0e2a364218f215ad662daa8cd6b4", [:rebar3], [{:unicode_util_compat, "0.2.0", [hex: :unicode_util_compat, optional: false]}]}, - "inch_ex": {:hex, :inch_ex, "0.5.6", "418357418a553baa6d04eccd1b44171936817db61f4c0840112b420b8e378e67", [:mix], [{:poison, "~> 1.5 or ~> 2.0 or ~> 3.0", [hex: :poison, optional: false]}]}, - "jsx": {:hex, :jsx, "2.8.2", "7acc7d785b5abe8a6e9adbde926a24e481f29956dd8b4df49e3e4e7bcc92a018", [:mix, :rebar3], []}, - "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], []}, - "mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], []}, - "mssqlex": {:hex, :mssqlex, "1.1.0", "11e35b7d035056af09d9c43d3cc4b8883240413eace738c050685c88e0356679", [:mix], [{:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm"}, - "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], []}, - "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], []}, - "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], []}, - "unicode_util_compat": {:hex, :unicode_util_compat, "0.2.0", "dbbccf6781821b1c0701845eaf966c9b6d83d7c3bfc65ca2b78b88b8678bfa35", [:rebar3], []}, + "certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"}, + "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"}, + "db_connection": {:hex, :db_connection, "2.1.1", "a51e8a2ee54ef2ae6ec41a668c85787ed40cb8944928c191280fe34c15b76ae5", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"}, + "decimal": {:hex, :decimal, "1.8.0", "ca462e0d885f09a1c5a342dbd7c1dcf27ea63548c65a65e67334f4b61803822e", [:mix], [], "hexpm"}, + "dialyxir": {:hex, :dialyxir, "1.0.0-rc.6", "78e97d9c0ff1b5521dd68041193891aebebce52fc3b93463c0a6806874557d7d", 
[:mix], [{:erlex, "~> 0.2.1", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm"}, + "earmark": {:hex, :earmark, "1.3.5", "0db71c8290b5bc81cb0101a2a507a76dca659513984d683119ee722828b424f6", [:mix], [], "hexpm"}, + "ecto": {:hex, :ecto, "3.2.0", "940e2598813f205223d60c78d66e514afe1db5167ed8075510a59e496619cfb5", [:mix], [{:decimal, "~> 1.6", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, + "ecto_sql": {:hex, :ecto_sql, "3.2.0", "751cea597e8deb616084894dd75cbabfdbe7255ff01e8c058ca13f0353a3921b", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.2.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.2.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"}, + "erlex": {:hex, :erlex, "0.2.4", "23791959df45fe8f01f388c6f7eb733cc361668cbeedd801bf491c55a029917b", [:mix], [], "hexpm"}, + "ex_doc": {:hex, :ex_doc, "0.21.1", "5ac36660846967cd869255f4426467a11672fec3d8db602c429425ce5b613b90", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"}, + "excoveralls": {:hex, :excoveralls, "0.11.1", "dd677fbdd49114fdbdbf445540ec735808250d56b011077798316505064edb2c", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"}, + "gproc": {:hex, :gproc, "0.8.0", "cea02c578589c61e5341fce149ea36ccef236cc2ecac8691fba408e7ea77ec2f", [:rebar3], [], "hexpm"}, + "hackney": {:hex, :hackney, "1.15.1", "9f8f471c844b8ce395f7b6d8398139e26ddca9ebc171a8b91342ee15a19963f4", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.4", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"}, + "idna": {:hex, :idna, "6.0.0", "689c46cbcdf3524c44d5f3dde8001f364cd7608a99556d8fbd8239a5798d4c10", [:rebar3], [{:unicode_util_compat, "0.4.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm"}, + "inch_ex": {:hex, :inch_ex, "0.5.6", "418357418a553baa6d04eccd1b44171936817db61f4c0840112b420b8e378e67", [:mix], [{:poison, "~> 1.5 or ~> 2.0 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"}, + "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"}, + "makeup": {:hex, :makeup, "1.0.0", "671df94cf5a594b739ce03b0d0316aa64312cee2574b6a44becb83cd90fb05dc", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"}, + "makeup_elixir": {:hex, :makeup_elixir, "0.14.0", "cf8b7c66ad1cff4c14679698d532f0b5d45a3968ffbcbfd590339cb57742f1ae", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"}, + "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"}, + "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", 
[:rebar3], [], "hexpm"}, + "mssqlex": {:hex, :mssqlex, "2.0.0-beta.0", "2b2821d7ff5d1e850726ed4ee21b681b289be083791cc07027cb83b5e03cdf19", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.8", [hex: :decimal, repo: "hexpm", optional: false]}, {:gproc, "~> 0.8", [hex: :gproc, repo: "hexpm", optional: false]}], "hexpm"}, + "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"}, + "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"}, + "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"}, + "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"}, + "telemetry": {:hex, :telemetry, "0.4.0", "8339bee3fa8b91cb84d14c2935f8ecf399ccd87301ad6da6b71c09553834b2ab", [:rebar3], [], "hexpm"}, + "unicode_util_compat": {:hex, :unicode_util_compat, "0.4.1", "d869e4c68901dd9531385bb0c8c40444ebf624e60b6962d95952775cac5e90cd", [:rebar3], [], "hexpm"}, } diff --git a/test/mssql_ecto/alter_table_test.exs b/test/mssql_ecto/alter_table_test.exs index cf81c63..3bda1df 100644 --- a/test/mssql_ecto/alter_table_test.exs +++ b/test/mssql_ecto/alter_table_test.exs @@ -64,8 +64,7 @@ defmodule MssqlEcto.AlterTableTest do end test "alter table with primary key" do - alter = - {:alter, table(:posts), [{:add, :my_pk, :serial, [primary_key: true]}]} + alter = {:alter, table(:posts), [{:add, :my_pk, :serial, [primary_key: true]}]} assert execute_ddl(alter) == [ """ diff --git a/test/mssql_ecto/constraint_test.exs b/test/mssql_ecto/constraint_test.exs index f29c195..6f7a669 100644 --- a/test/mssql_ecto/constraint_test.exs +++ b/test/mssql_ecto/constraint_test.exs @@ -4,9 +4,7 @@ defmodule MssqlEcto.ConstraintTest do import Ecto.Migration, only: [constraint: 2, constraint: 3] test "create check constraint" do - create = - {:create, - constraint(:products, "price_must_be_positive", check: "price > 0")} + create = {:create, constraint(:products, "price_must_be_positive", check: "price > 0")} assert execute_ddl(create) == [ @@ -54,8 +52,7 @@ defmodule MssqlEcto.ConstraintTest do ~s|ALTER TABLE "products" DROP CONSTRAINT "price_must_be_positive"| ] - drop = - {:drop, constraint(:products, "price_must_be_positive", prefix: "foo")} + drop = {:drop, constraint(:products, "price_must_be_positive", prefix: "foo")} assert execute_ddl(drop) == [ diff --git a/test/mssql_ecto/create_table_test.exs b/test/mssql_ecto/create_table_test.exs index 0b3795e..3f54e89 100644 --- a/test/mssql_ecto/create_table_test.exs +++ b/test/mssql_ecto/create_table_test.exs @@ -9,8 +9,7 @@ defmodule MssqlEcto.CreateTableTest do {:create, table(:posts), [ {:add, :name, :string, [default: "Untitled", size: 20, null: false]}, - {:add, :price, :numeric, - [precision: 8, scale: 2, default: {:fragment, "expr"}]}, + {:add, :price, :numeric, [precision: 8, scale: 2, default: {:fragment, "expr"}]}, {:add, :on_hand, :integer, [default: 0, null: true]}, {:add, :published_at, :"time without time zone", [null: true]}, {:add, :is_active, :boolean, [default: true]} @@ -48,22 +47,15 @@ defmodule MssqlEcto.CreateTableTest do [ {:add, :id, :serial, [primary_key: true]}, {:add, :category_0, %Reference{table: :categories}, []}, - {:add, :category_1, %Reference{table: :categories, name: 
:foo_bar}, - []}, - {:add, :category_2, - %Reference{table: :categories, on_delete: :nothing}, []}, - {:add, :category_3, - %Reference{table: :categories, on_delete: :delete_all}, + {:add, :category_1, %Reference{table: :categories, name: :foo_bar}, []}, + {:add, :category_2, %Reference{table: :categories, on_delete: :nothing}, []}, + {:add, :category_3, %Reference{table: :categories, on_delete: :delete_all}, [null: false]}, - {:add, :category_4, - %Reference{table: :categories, on_delete: :nilify_all}, []}, - {:add, :category_5, - %Reference{table: :categories, on_update: :nothing}, []}, - {:add, :category_6, - %Reference{table: :categories, on_update: :update_all}, + {:add, :category_4, %Reference{table: :categories, on_delete: :nilify_all}, []}, + {:add, :category_5, %Reference{table: :categories, on_update: :nothing}, []}, + {:add, :category_6, %Reference{table: :categories, on_update: :update_all}, [null: false]}, - {:add, :category_7, - %Reference{table: :categories, on_update: :nilify_all}, []}, + {:add, :category_7, %Reference{table: :categories, on_update: :nilify_all}, []}, {:add, :category_8, %Reference{ table: :categories, diff --git a/test/mssql_ecto/delete_all_test.exs b/test/mssql_ecto/delete_all_test.exs index 798ca18..563e748 100644 --- a/test/mssql_ecto/delete_all_test.exs +++ b/test/mssql_ecto/delete_all_test.exs @@ -53,24 +53,41 @@ defmodule MssqlEcto.DeleteAllTest do end test "delete all" do - query = Schema |> Queryable.to_query() |> normalize - assert SQL.delete_all(query) == ~s{DELETE s0 FROM "schema" AS s0} + query = + Schema + |> Queryable.to_query() + |> normalize + |> SQL.delete_all() + |> IO.iodata_to_binary() - query = from(e in Schema, where: e.x == 123) |> normalize + assert query == ~s{DELETE s0 FROM "schema" AS s0} - assert SQL.delete_all(query) == + query = + from(e in Schema, where: e.x == 123) + |> normalize + |> SQL.delete_all() + |> IO.iodata_to_binary() + + assert query == ~s{DELETE s0 FROM "schema" AS s0 WHERE (s0."x" = 123)} - query = Schema |> join(:inner, [p], q in Schema2, p.x == q.z) |> normalize + query = + Schema + |> join(:inner, [p], q in Schema2, on: p.x == q.z) + |> normalize + |> SQL.delete_all() + |> IO.iodata_to_binary() - assert SQL.delete_all(query) == + assert query == ~s{DELETE s0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s0."x" = s1."z")} query = from(e in Schema, where: e.x == 123, join: q in Schema2, on: e.x == q.z) |> normalize + |> SQL.delete_all() + |> IO.iodata_to_binary() - assert SQL.delete_all(query) == + assert query == ~s{DELETE s0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s0."x" = s1."z") WHERE (s0."x" = 123)} query = @@ -81,22 +98,38 @@ defmodule MssqlEcto.DeleteAllTest do join: assoc(e, :permalink) ) |> normalize + |> SQL.delete_all() + |> IO.iodata_to_binary() - assert SQL.delete_all(query) == + assert query == ~s{DELETE s0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s1."z" = s0."x") INNER JOIN "schema3" AS s2 ON (s2."id" = s0."y") WHERE (s0."x" = 123)} end test "delete all with returning" do - query = Schema |> Queryable.to_query() |> select([m], m) |> normalize + query = + Schema + |> Queryable.to_query() + |> select([m], m) + |> normalize + |> SQL.delete_all() + |> IO.iodata_to_binary() - assert SQL.delete_all(query) == + assert query == ~s{DELETE s0 OUTPUT DELETED."id", DELETED."x", DELETED."y", DELETED."z", DELETED."w" FROM "schema" AS s0} end test "delete all with prefix" do - query = Schema |> Queryable.to_query() |> normalize + query = + Schema + |> Queryable.to_query() + + query = + 
%{query | prefix: "prefix"} + |> normalize + |> SQL.delete_all() + |> IO.iodata_to_binary() - assert SQL.delete_all(%{query | prefix: "prefix"}) == + assert query == ~s{DELETE s0 FROM "prefix"."schema" AS s0} end end diff --git a/test/mssql_ecto/delete_test.exs b/test/mssql_ecto/delete_test.exs index 5ff56b3..439a843 100644 --- a/test/mssql_ecto/delete_test.exs +++ b/test/mssql_ecto/delete_test.exs @@ -2,17 +2,24 @@ defmodule MssqlEcto.DeleteTest do use MssqlEcto.Case, async: true test "delete" do - query = SQL.delete(nil, "schema", [:x, :y], []) - assert query == ~s{DELETE FROM "schema" WHERE "x" = ?1 AND "y" = ?2} + query = + SQL.delete(nil, "schema", [:x, :y], []) + |> IO.iodata_to_binary() - query = SQL.delete(nil, "schema", [:x, :y], [:z]) + assert query == ~s{DELETE FROM "schema" WHERE "x" = ? AND "y" = ?} + + query = + SQL.delete(nil, "schema", [:x, :y], [:z]) + |> IO.iodata_to_binary() assert query == - ~s{DELETE FROM "schema" OUTPUT DELETED."z" WHERE "x" = ?1 AND "y" = ?2} + ~s{DELETE FROM "schema" OUTPUT DELETED."z" WHERE "x" = ? AND "y" = ?} - query = SQL.delete("prefix", "schema", [:x, :y], []) + query = + SQL.delete("prefix", "schema", [:x, :y], []) + |> IO.iodata_to_binary() assert query == - ~s{DELETE FROM "prefix"."schema" WHERE "x" = ?1 AND "y" = ?2} + ~s{DELETE FROM "prefix"."schema" WHERE "x" = ? AND "y" = ?} end end diff --git a/test/mssql_ecto/index_test.exs b/test/mssql_ecto/index_test.exs index 218fa25..3d6de7f 100644 --- a/test/mssql_ecto/index_test.exs +++ b/test/mssql_ecto/index_test.exs @@ -25,9 +25,7 @@ defmodule MssqlEcto.IndexTest do ~s|CREATE INDEX "posts_category_id_permalink_index" ON "foo"."posts" ("category_id", "permalink")| ] - create = - {:create, - index(:posts, ["lower(permalink)"], name: "posts$main", prefix: :foo)} + create = {:create, index(:posts, ["lower(permalink)"], name: "posts$main", prefix: :foo)} assert execute_ddl(create) == [~s|CREATE INDEX "posts$main" ON "foo"."posts" (lower(permalink))|] @@ -60,17 +58,14 @@ defmodule MssqlEcto.IndexTest do end test "create unique index with condition" do - create = - {:create, - index(:posts, [:permalink], unique: true, where: "public IS TRUE")} + create = {:create, index(:posts, [:permalink], unique: true, where: "public IS TRUE")} assert execute_ddl(create) == [ ~s|CREATE UNIQUE INDEX "posts_permalink_index" ON "posts" ("permalink") WHERE public IS TRUE| ] - create = - {:create, index(:posts, [:permalink], unique: true, where: :public)} + create = {:create, index(:posts, [:permalink], unique: true, where: :public)} assert execute_ddl(create) == [ diff --git a/test/mssql_ecto/insert_test.exs b/test/mssql_ecto/insert_test.exs index a73db3b..8e12eb7 100644 --- a/test/mssql_ecto/insert_test.exs +++ b/test/mssql_ecto/insert_test.exs @@ -6,9 +6,10 @@ defmodule MssqlEcto.InsertTest do test "insert" do query = SQL.insert(nil, "schema", [:x, :y], [[:x, :y]], {:raise, [], []}, [:id]) + |> IO.iodata_to_binary() assert query == - ~s{INSERT INTO "schema" ("x","y") OUTPUT INSERTED."id" VALUES (?1,?2)} + ~s{INSERT INTO "schema" ("x","y") OUTPUT INSERTED."id" VALUES (?,?)} query = SQL.insert( @@ -19,29 +20,39 @@ defmodule MssqlEcto.InsertTest do {:raise, [], []}, [:id] ) + |> IO.iodata_to_binary() assert query == - ~s{INSERT INTO "schema" ("x","y") OUTPUT INSERTED."id" VALUES (?1,?2),(DEFAULT,?3)} + ~s{INSERT INTO "schema" ("x","y") OUTPUT INSERTED."id" VALUES (?,?),(DEFAULT,?)} - query = SQL.insert(nil, "schema", [], [[]], {:raise, [], []}, [:id]) + query = + SQL.insert(nil, "schema", [], [[]], {:raise, [], []}, 
[:id])
+      |> IO.iodata_to_binary()
 
     assert query ==
-             ~s{INSERT INTO "schema" OUTPUT INSERTED."id" DEFAULT VALUES ; }
+             ~s{INSERT INTO "schema" OUTPUT INSERTED."id" DEFAULT VALUES }
+
+    query =
+      SQL.insert(nil, "schema", [], [[]], {:raise, [], []}, [])
+      |> IO.iodata_to_binary()
 
-    query = SQL.insert(nil, "schema", [], [[]], {:raise, [], []}, [])
-    assert query == ~s{INSERT INTO "schema" DEFAULT VALUES ; }
+    assert query == ~s{INSERT INTO "schema" DEFAULT VALUES }
+
+    query =
+      SQL.insert("prefix", "schema", [], [[]], {:raise, [], []}, [])
+      |> IO.iodata_to_binary()
 
-    query = SQL.insert("prefix", "schema", [], [[]], {:raise, [], []}, [])
-    assert query == ~s{INSERT INTO "prefix"."schema" DEFAULT VALUES ; }
+    assert query == ~s{INSERT INTO "prefix"."schema" DEFAULT VALUES }
   end
 
   @tag skip: "Not yet implemented. Should consider MERGE for upserts"
   test "insert with on conflict" do
     query =
       SQL.insert(nil, "schema", [:x, :y], [[:x, :y]], {:nothing, [], []}, [])
+      |> IO.iodata_to_binary()
 
     assert query ==
-             ~s{INSERT INTO "schema" ("x","y") VALUES ($1,$2) ON CONFLICT DO NOTHING}
+             ~s{INSERT INTO "schema" ("x","y") VALUES (?,?) ON CONFLICT DO NOTHING}
 
     query =
       SQL.insert(
@@ -52,9 +63,10 @@ defmodule MssqlEcto.InsertTest do
         {:nothing, [], [:x, :y]},
         []
       )
+      |> IO.iodata_to_binary()
 
     assert query ==
-             ~s{INSERT INTO "schema" ("x","y") VALUES ($1,$2) ON CONFLICT ("x","y") DO NOTHING}
+             ~s{INSERT INTO "schema" ("x","y") VALUES (?,?) ON CONFLICT ("x","y") DO NOTHING}
 
     update = from("schema", update: [set: [z: "foo"]]) |> normalize(:update_all)
 
@@ -62,9 +74,10 @@ defmodule MssqlEcto.InsertTest do
       SQL.insert(nil, "schema", [:x, :y], [[:x, :y]], {update, [], [:x, :y]}, [
         :z
       ])
+      |> IO.iodata_to_binary()
 
     assert query ==
-             ~s{INSERT INTO "schema" AS s0 ("x","y") OUTPUT INSERTED."z" VALUES ($1,$2) ON CONFLICT ("x","y") DO UPDATE SET "z" = 'foo'}
+             ~s{INSERT INTO "schema" AS s0 ("x","y") OUTPUT INSERTED."z" VALUES (?,?) ON CONFLICT ("x","y") DO UPDATE SET "z" = 'foo'}
 
     update =
       from("schema", update: [set: [z: ^"foo"]], where: [w: true])
@@ -74,9 +87,10 @@ defmodule MssqlEcto.InsertTest do
       SQL.insert(nil, "schema", [:x, :y], [[:x, :y]], {update, [], [:x, :y]}, [
         :z
       ])
+      |> IO.iodata_to_binary()
 
     assert query ==
-             ~s{INSERT INTO "schema" AS s0 ("x","y") OUTPUT INSERTED."z" VALUES ($1,$2) ON CONFLICT ("x","y") DO UPDATE SET "z" = $3 WHERE (s0."w" = TRUE)}
+             ~s{INSERT INTO "schema" AS s0 ("x","y") OUTPUT INSERTED."z" VALUES (?,?) ON CONFLICT ("x","y") DO UPDATE SET "z" = ? WHERE (s0."w" = TRUE)}
 
     # For :replace_all
     query =
@@ -88,9 +102,10 @@ defmodule MssqlEcto.InsertTest do
         {:replace_all, [], [:id]},
         []
       )
+      |> IO.iodata_to_binary()
 
     assert query ==
-             ~s{INSERT INTO "schema" ("x","y") VALUES ($1,$2) ON CONFLICT ("id") DO UPDATE SET "x" = EXCLUDED."x","y" = EXCLUDED."y"}
+             ~s{INSERT INTO "schema" ("x","y") VALUES (?,?) ON CONFLICT ("id") DO UPDATE SET "x" = EXCLUDED."x","y" = EXCLUDED."y"}
 
     query =
       SQL.insert(
@@ -101,8 +116,9 @@ defmodule MssqlEcto.InsertTest do
         {:replace_all, [], []},
         []
       )
+      |> IO.iodata_to_binary()
 
     assert query ==
-             ~s{INSERT INTO "schema" ("x","y") VALUES ($1,$2) ON CONFLICT DO UPDATE SET "x" = EXCLUDED."x","y" = EXCLUDED."y"}
+             ~s{INSERT INTO "schema" ("x","y") VALUES (?,?) 
ON CONFLICT DO UPDATE SET "x" = EXCLUDED."x","y" = EXCLUDED."y"} end end diff --git a/test/mssql_ecto/join_test.exs b/test/mssql_ecto/join_test.exs index 5845b14..5ba9c22 100644 --- a/test/mssql_ecto/join_test.exs +++ b/test/mssql_ecto/join_test.exs @@ -51,24 +51,40 @@ defmodule MssqlEcto.JoinTest do end end + defp parse(query, prefix) do + query = + query + |> normalize + + SQL.all(%{query | prefix: prefix}) + |> IO.iodata_to_binary() + end + + defp parse(query) do + query + |> normalize + |> SQL.all() + |> IO.iodata_to_binary() + end + test "join" do query = Schema - |> join(:inner, [p], q in Schema2, p.x == q.z) + |> join(:inner, [p], q in Schema2, on: p.x == q.z) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s0."x" = s1."z")} query = Schema - |> join(:inner, [p], q in Schema2, p.x == q.z) - |> join(:inner, [], Schema, true) + |> join(:inner, [p], q in Schema2, on: p.x == q.z) + |> join(:inner, [], Schema) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s0."x" = s1."z") } <> ~s{INNER JOIN "schema" AS s2 ON (1=1)} end @@ -76,22 +92,22 @@ defmodule MssqlEcto.JoinTest do test "join with nothing bound" do query = Schema - |> join(:inner, [], q in Schema2, q.z == q.z) + |> join(:inner, [], q in Schema2, on: q.z == q.z) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s1."z" = s1."z")} end test "join without schema" do query = "posts" - |> join(:inner, [p], q in "comments", p.x == q.z) + |> join(:inner, [p], q in "comments", on: p.x == q.z) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "posts" AS p0 INNER JOIN "comments" AS c1 ON (p0."x" = c1."z")} end @@ -105,13 +121,13 @@ defmodule MssqlEcto.JoinTest do query = "comments" - |> join(:inner, [c], p in subquery(posts), true) + |> join(:inner, [c], p in subquery(posts)) |> select([_, p], p.x) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s1."x" FROM "comments" AS c0 } <> - ~s{INNER JOIN (SELECT p0."x" AS "x", p0."y" AS "y" FROM "posts" AS p0 WHERE (p0."title" = ?1)) AS s1 ON (1=1)} + ~s{INNER JOIN (SELECT p0."x" AS "x", p0."y" AS "y" FROM "posts" AS p0 WHERE (p0."title" = ?)) AS s1 ON (1=1)} posts = subquery( @@ -122,23 +138,23 @@ defmodule MssqlEcto.JoinTest do query = "comments" - |> join(:inner, [c], p in subquery(posts), true) + |> join(:inner, [c], p in subquery(posts)) |> select([_, p], p) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s1."x", s1."z" FROM "comments" AS c0 } <> - ~s{INNER JOIN (SELECT p0."x" AS "x", p0."y" AS "z" FROM "posts" AS p0 WHERE (p0."title" = ?1)) AS s1 ON (1=1)} + ~s{INNER JOIN (SELECT p0."x" AS "x", p0."y" AS "z" FROM "posts" AS p0 WHERE (p0."title" = ?)) AS s1 ON (1=1)} end test "join with prefix" do query = Schema - |> join(:inner, [p], q in Schema2, p.x == q.z) + |> join(:inner, [p], q in Schema2, on: p.x == q.z) |> select([], true) - |> normalize + |> parse("prefix") - assert SQL.all(%{query | prefix: "prefix"}) == + assert query == ~s{SELECT 'TRUE' FROM "prefix"."schema" AS s0 INNER JOIN "prefix"."schema2" AS s1 ON (s0."x" = s1."z")} end @@ -156,23 +172,23 @@ defmodule MssqlEcto.JoinTest do ) |> select([p], {p.id, ^0}) |> where([p], 
p.id > 0 and p.id < ^100) - |> normalize + |> parse() - assert SQL.all(query) == - ~s{SELECT s0."id", ?1 FROM "schema" AS s0 INNER JOIN } <> - ~s{(SELECT * FROM schema2 AS s2 WHERE s2.id = s0."x" AND s2.field = ?2) AS f1 ON (1=1) } <> - ~s{WHERE ((s0."id" > 0) AND (s0."id" < ?3))} + assert query == + ~s{SELECT s0."id", ? FROM "schema" AS s0 INNER JOIN } <> + ~s{(SELECT * FROM schema2 AS s2 WHERE s2.id = s0."x" AND s2.field = ?) AS f1 ON (1=1) } <> + ~s{WHERE ((s0."id" > 0) AND (s0."id" < ?))} end test "join with fragment and on defined" do query = Schema - |> join(:inner, [p], q in fragment("SELECT * FROM schema2"), q.id == p.id) + |> join(:inner, [p], q in fragment("SELECT * FROM schema2"), on: q.id == p.id) |> select([p], {p.id, ^0}) - |> normalize + |> parse() - assert SQL.all(query) == - ~s{SELECT s0."id", ?1 FROM "schema" AS s0 INNER JOIN } <> + assert query == + ~s{SELECT s0."id", ? FROM "schema" AS s0 INNER JOIN } <> ~s{(SELECT * FROM schema2) AS f1 ON (f1."id" = s0."id")} end @@ -190,12 +206,12 @@ defmodule MssqlEcto.JoinTest do ) |> select([p, q], {p.id, q.z}) |> where([p], p.id > 0 and p.id < ^100) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."id", f1."z" FROM "schema" AS s0 INNER JOIN LATERAL } <> - ~s{(SELECT * FROM schema2 AS s2 WHERE s2.id = s0."x" AND s2.field = ?1) AS f1 ON (1=1) } <> - ~s{WHERE ((s0."id" > 0) AND (s0."id" < ?2))} + ~s{(SELECT * FROM schema2 AS s2 WHERE s2.id = s0."x" AND s2.field = ?) AS f1 ON (1=1) } <> + ~s{WHERE ((s0."id" > 0) AND (s0."id" < ?))} end test "association join belongs_to" do @@ -203,9 +219,9 @@ defmodule MssqlEcto.JoinTest do Schema2 |> join(:inner, [c], p in assoc(c, :post)) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == "SELECT 'TRUE' FROM \"schema2\" AS s0 INNER JOIN \"schema\" AS s1 ON (s1.\"x\" = s0.\"z\")" end @@ -214,9 +230,9 @@ defmodule MssqlEcto.JoinTest do Schema |> join(:inner, [p], c in assoc(p, :comments)) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == "SELECT 'TRUE' FROM \"schema\" AS s0 INNER JOIN \"schema2\" AS s1 ON (s1.\"z\" = s0.\"x\")" end @@ -225,27 +241,29 @@ defmodule MssqlEcto.JoinTest do Schema |> join(:inner, [p], pp in assoc(p, :permalink)) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == "SELECT 'TRUE' FROM \"schema\" AS s0 INNER JOIN \"schema3\" AS s1 ON (s1.\"id\" = s0.\"y\")" end test "join produces correct bindings" do query = from(p in Schema, join: c in Schema2, on: true) - query = from(p in query, join: c in Schema2, on: true, select: {p.id, c.id}) - query = normalize(query) - assert SQL.all(query) == + query = + from(p in query, join: c in Schema2, on: true, select: {p.id, c.id}) + |> parse() + + assert query == "SELECT s0.\"id\", s2.\"id\" FROM \"schema\" AS s0 INNER JOIN \"schema2\" AS s1 ON (1=1) INNER JOIN \"schema2\" AS s2 ON (1=1)" end test "cross join" do query = from(p in Schema, cross_join: c in Schema2, select: {p.id, c.id}) - |> normalize() + |> parse() - assert SQL.all(query) == + assert query == "SELECT s0.\"id\", s1.\"id\" FROM \"schema\" AS s0 CROSS JOIN \"schema2\" AS s1 ON (1=1)" end end diff --git a/test/mssql_ecto/migration_test.exs b/test/mssql_ecto/migration_test.exs index c130aa9..7e39d59 100644 --- a/test/mssql_ecto/migration_test.exs +++ b/test/mssql_ecto/migration_test.exs @@ -16,8 +16,7 @@ defmodule MssqlEcto.MigrationTest do end test "rename table with prefix" do - rename = - {:rename, table(:posts, 
prefix: :foo), table(:new_posts, prefix: :foo)} + rename = {:rename, table(:posts, prefix: :foo), table(:new_posts, prefix: :foo)} assert execute_ddl(rename) == [ ~s|EXEC sp_rename 'foo.posts', 'new_posts', 'OBJECT'| diff --git a/test/mssql_ecto/select_test.exs b/test/mssql_ecto/select_test.exs index bfde18c..dd32be7 100644 --- a/test/mssql_ecto/select_test.exs +++ b/test/mssql_ecto/select_test.exs @@ -51,17 +51,21 @@ defmodule MssqlEcto.SelectTest do end end + defp parse(query) do + query |> normalize |> SQL.all() |> IO.iodata_to_binary() + end + test "from" do - query = Schema |> select([r], r.x) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" FROM "schema" AS s0} + query = Schema |> select([r], r.x) |> parse() + assert query == ~s{SELECT s0."x" FROM "schema" AS s0} end test "from without schema" do - query = "posts" |> select([r], r.x) |> normalize - assert SQL.all(query) == ~s{SELECT p0."x" FROM "posts" AS p0} + query = "posts" |> select([r], r.x) |> parse() + assert query == ~s{SELECT p0."x" FROM "posts" AS p0} - query = "posts" |> select([:x]) |> normalize - assert SQL.all(query) == ~s{SELECT p0."x" FROM "posts" AS p0} + query = "posts" |> select([:x]) |> parse() + assert query == ~s{SELECT p0."x" FROM "posts" AS p0} assert_raise Ecto.QueryError, ~r"Microsoft SQL Server requires a schema module", @@ -74,85 +78,118 @@ defmodule MssqlEcto.SelectTest do query = subquery("posts" |> select([r], %{x: r.x, y: r.y})) |> select([r], r.x) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM (SELECT p0."x" AS "x", p0."y" AS "y" FROM "posts" AS p0) AS s0} query = subquery("posts" |> select([r], %{x: r.x, z: r.y})) |> select([r], r) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x", s0."z" FROM (SELECT p0."x" AS "x", p0."y" AS "z" FROM "posts" AS p0) AS s0} end test "select" do - query = Schema |> select([r], {r.x, r.y}) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} + query = Schema |> select([r], {r.x, r.y}) |> parse() + assert query == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} - query = Schema |> select([r], [r.x, r.y]) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} + query = Schema |> select([r], [r.x, r.y]) |> parse() + assert query == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} + + query = + Schema + |> select([r], struct(r, [:x, :y])) + |> parse() - query = Schema |> select([r], struct(r, [:x, :y])) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} + assert query == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} end test "aggregates" do - query = Schema |> select([r], count(r.x)) |> normalize - assert SQL.all(query) == ~s{SELECT count(s0."x") FROM "schema" AS s0} + query = Schema |> select([r], count(r.x)) |> parse() + assert query == ~s{SELECT count(s0."x") FROM "schema" AS s0} - query = Schema |> select([r], count(r.x, :distinct)) |> normalize + query = + Schema + |> select([r], count(r.x, :distinct)) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT count(DISTINCT s0."x") FROM "schema" AS s0} end test "distinct" do - query = Schema |> distinct([r], r.x) |> select([r], {r.x, r.y}) |> normalize + query = + Schema + |> distinct([r], r.x) + |> select([r], {r.x, r.y}) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT DISTINCT ON (s0."x") s0."x", s0."y" FROM "schema" AS s0} query = - Schema |> distinct([r], desc: r.x) |> select([r], {r.x, r.y}) |> 
normalize + Schema + |> distinct([r], desc: r.x) + |> select([r], {r.x, r.y}) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT DISTINCT ON (s0."x") s0."x", s0."y" FROM "schema" AS s0} - query = Schema |> distinct([r], 2) |> select([r], r.x) |> normalize + query = + Schema + |> distinct([r], 2) + |> select([r], r.x) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT DISTINCT ON (2) s0."x" FROM "schema" AS s0} query = Schema |> distinct([r], [r.x, r.y]) |> select([r], {r.x, r.y}) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT DISTINCT ON (s0."x", s0."y") s0."x", s0."y" FROM "schema" AS s0} query = - Schema |> distinct([r], true) |> select([r], {r.x, r.y}) |> normalize + Schema + |> distinct([r], true) + |> select([r], {r.x, r.y}) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT DISTINCT s0."x", s0."y" FROM "schema" AS s0} query = - Schema |> distinct([r], false) |> select([r], {r.x, r.y}) |> normalize + Schema + |> distinct([r], false) + |> select([r], {r.x, r.y}) + |> parse() - assert SQL.all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} + assert query == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} - query = Schema |> distinct(true) |> select([r], {r.x, r.y}) |> normalize + query = + Schema + |> distinct(true) + |> select([r], {r.x, r.y}) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT DISTINCT s0."x", s0."y" FROM "schema" AS s0} - query = Schema |> distinct(false) |> select([r], {r.x, r.y}) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} + query = + Schema + |> distinct(false) + |> select([r], {r.x, r.y}) + |> parse() + + assert query == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0} end test "distinct with order by" do @@ -161,9 +198,9 @@ defmodule MssqlEcto.SelectTest do |> order_by([r], [r.y]) |> distinct([r], desc: r.x) |> select([r], r.x) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT DISTINCT ON (s0."x") s0."x" FROM "schema" AS s0 ORDER BY s0."x" DESC, s0."y"} end @@ -173,9 +210,9 @@ defmodule MssqlEcto.SelectTest do |> where([r], r.x == 42) |> where([r], r.y != 43) |> select([r], r.x) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 WHERE (s0."x" = 42) AND (s0."y" != 43)} end @@ -185,9 +222,9 @@ defmodule MssqlEcto.SelectTest do |> or_where([r], r.x == 42) |> or_where([r], r.y != 43) |> select([r], r.x) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 WHERE (s0."x" = 42) OR (s0."y" != 43)} query = @@ -196,151 +233,220 @@ defmodule MssqlEcto.SelectTest do |> or_where([r], r.y != 43) |> where([r], r.z == 44) |> select([r], r.x) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 WHERE ((s0."x" = 42) OR (s0."y" != 43)) AND (s0."z" = 44)} end test "order by" do - query = Schema |> order_by([r], r.x) |> select([r], r.x) |> normalize + query = + Schema + |> order_by([r], r.x) + |> select([r], r.x) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 ORDER BY s0."x"} - query = Schema |> order_by([r], [r.x, r.y]) |> select([r], r.x) |> normalize + query = + Schema + |> order_by([r], [r.x, r.y]) + |> select([r], r.x) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 ORDER BY s0."x", s0."y"} query = Schema |> order_by([r], asc: r.x, desc: 
r.y) |> select([r], r.x) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 ORDER BY s0."x", s0."y" DESC} - query = Schema |> order_by([r], []) |> select([r], r.x) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" FROM "schema" AS s0} + query = + Schema + |> order_by([r], []) + |> select([r], r.x) + |> parse() + + assert query == ~s{SELECT s0."x" FROM "schema" AS s0} end test "limit and offset" do - query = Schema |> limit([r], 3) |> select([], true) |> normalize - assert SQL.all(query) == ~s{SELECT TOP 3 'TRUE' FROM "schema" AS s0} + query = + Schema + |> limit([r], 3) + |> select([], true) + |> parse() - query = Schema |> offset([r], 5) |> select([], true) |> normalize - assert SQL.all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 OFFSET 5 ROWS} + assert query == ~s{SELECT TOP 3 'TRUE' FROM "schema" AS s0} query = - Schema |> offset([r], 5) |> limit([r], 3) |> select([], true) |> normalize + Schema + |> offset([r], 5) + |> select([], true) + |> parse() + + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 OFFSET 5 ROWS} - assert SQL.all(query) == + query = + Schema + |> offset([r], 5) + |> limit([r], 3) + |> select([], true) + |> parse() + + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 OFFSET 5 ROWS FETCH NEXT 3 ROWS ONLY} end - @tag skip: "Not yet supported" + # TODO why was this skipped? + # @tag skip: "Not yet supported" test "lock" do - query = Schema |> lock("FOR SHARE NOWAIT") |> select([], true) |> normalize + query = + Schema + |> lock("FOR SHARE NOWAIT") + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 FOR SHARE NOWAIT} end test "string escape" do - query = "schema" |> where(foo: "'\\ ") |> select([], true) |> normalize + query = + "schema" + |> where(foo: "'\\ ") + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM \"schema\" AS s0 WHERE (s0.\"foo\" = '''\\ ')} - query = "schema" |> where(foo: "'") |> select([], true) |> normalize + query = + "schema" + |> where(foo: "'") + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = '''')} end test "binary ops" do - query = Schema |> select([r], r.x == 2) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" = 2 FROM "schema" AS s0} + query = Schema |> select([r], r.x == 2) |> parse() + assert query == ~s{SELECT s0."x" = 2 FROM "schema" AS s0} - query = Schema |> select([r], r.x != 2) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" != 2 FROM "schema" AS s0} + query = Schema |> select([r], r.x != 2) |> parse() + assert query == ~s{SELECT s0."x" != 2 FROM "schema" AS s0} - query = Schema |> select([r], r.x <= 2) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" <= 2 FROM "schema" AS s0} + query = Schema |> select([r], r.x <= 2) |> parse() + assert query == ~s{SELECT s0."x" <= 2 FROM "schema" AS s0} - query = Schema |> select([r], r.x >= 2) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" >= 2 FROM "schema" AS s0} + query = Schema |> select([r], r.x >= 2) |> parse() + assert query == ~s{SELECT s0."x" >= 2 FROM "schema" AS s0} - query = Schema |> select([r], r.x < 2) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" < 2 FROM "schema" AS s0} + query = Schema |> select([r], r.x < 2) |> parse() + assert query == ~s{SELECT s0."x" < 2 FROM "schema" AS s0} - query = Schema |> select([r], r.x > 2) |> normalize - assert SQL.all(query) == 
~s{SELECT s0."x" > 2 FROM "schema" AS s0} + query = Schema |> select([r], r.x > 2) |> parse() + assert query == ~s{SELECT s0."x" > 2 FROM "schema" AS s0} end test "is_nil" do - query = Schema |> select([r], is_nil(r.x)) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" IS NULL FROM "schema" AS s0} + query = Schema |> select([r], is_nil(r.x)) |> parse() + assert query == ~s{SELECT s0."x" IS NULL FROM "schema" AS s0} + + query = Schema |> select([r], not is_nil(r.x)) |> parse() - query = Schema |> select([r], not is_nil(r.x)) |> normalize - assert SQL.all(query) == ~s{SELECT NOT (s0."x" IS NULL) FROM "schema" AS s0} + assert query == ~s{SELECT ~(s0."x" IS NULL) FROM "schema" AS s0} end test "fragments" do - query = Schema |> select([r], fragment("now")) |> normalize - assert SQL.all(query) == ~s{SELECT now FROM "schema" AS s0} + query = Schema |> select([r], fragment("now")) |> parse() + + assert query == ~s{SELECT now FROM "schema" AS s0} - query = Schema |> select([r], fragment("downcase(?)", r.x)) |> normalize - assert SQL.all(query) == ~s{SELECT downcase(s0."x") FROM "schema" AS s0} + query = + Schema + |> select([r], fragment("downcase(?)", r.x)) + |> parse() + + assert query == ~s{SELECT downcase(s0."x") FROM "schema" AS s0} value = 13 query = Schema |> select([r], fragment("downcase(?, ?)", r.x, ^value)) - |> normalize + |> parse() - assert SQL.all(query) == ~s{SELECT downcase(s0."x", ?1) FROM "schema" AS s0} + assert query == ~s{SELECT downcase(s0."x", ?) FROM "schema" AS s0} - query = Schema |> select([], fragment(title: 2)) |> normalize + query = Schema |> select([], fragment(title: 2)) assert_raise Ecto.QueryError, fn -> - SQL.all(query) + parse(query) end end test "literals" do - query = "schema" |> where(foo: true) |> select([], true) |> normalize + query = + "schema" + |> where(foo: true) + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 1)} - query = "schema" |> where(foo: false) |> select([], true) |> normalize + query = + "schema" + |> where(foo: false) + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 0)} - query = "schema" |> where(foo: "abc") |> select([], true) |> normalize + query = + "schema" + |> where(foo: "abc") + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 'abc')} query = - "schema" |> where(foo: <<0, ?a, ?b, ?c>>) |> select([], true) |> normalize + "schema" + |> where(foo: <<0, ?a, ?b, ?c>>) + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 0x00616263)} - query = "schema" |> where(foo: 123) |> select([], true) |> normalize + query = + "schema" + |> where(foo: 123) + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 123)} - query = "schema" |> where(foo: 123.0) |> select([], true) |> normalize + query = + "schema" + |> where(foo: 123.0) + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 123.0)} end @@ -348,12 +454,16 @@ defmodule MssqlEcto.SelectTest do query = Schema |> select([], type(^"601d74e4-a8d3-4b6e-8365-eddb4c893327", Ecto.UUID)) - |> normalize + |> parse() + + assert query == ~s{SELECT CAST(? 
AS char(36)) FROM "schema" AS s0} - assert SQL.all(query) == ~s{SELECT CAST(?1 AS char(36)) FROM "schema" AS s0} + query = + Schema + |> select([], type(^1, Custom.Permalink)) + |> parse() - query = Schema |> select([], type(^1, Custom.Permalink)) |> normalize - assert SQL.all(query) == ~s{SELECT CAST(?1 AS int) FROM "schema" AS s0} + assert query == ~s{SELECT CAST(? AS int) FROM "schema" AS s0} end test "nested expressions" do @@ -362,45 +472,58 @@ defmodule MssqlEcto.SelectTest do query = from(r in Schema, []) |> select([r], (r.x > 0 and r.y > ^(-z)) or true) - |> normalize + |> parse() - assert SQL.all(query) == - ~s{SELECT ((s0."x" > 0) AND (s0."y" > ?1)) OR 1 FROM "schema" AS s0} + assert query == + ~s{SELECT ((s0."x" > 0) AND (s0."y" > ?)) OR 1 FROM "schema" AS s0} end test "in expression" do - query = Schema |> select([e], 1 in []) |> normalize - assert SQL.all(query) == ~s{SELECT 0=1 FROM "schema" AS s0} + query = Schema |> select([e], 1 in []) |> parse() + assert query == ~s{SELECT 0=1 FROM "schema" AS s0} + + query = Schema |> select([e], 1 in [1, e.x, 3]) |> parse() + + assert query == ~s{SELECT 1 IN (1,s0."x",3) FROM "schema" AS s0} + + query = Schema |> select([e], 1 in ^[]) |> parse() + assert query == ~s{SELECT 0=1 FROM "schema" AS s0} + + query = Schema |> select([e], 1 in ^[1, 2, 3]) |> parse() + + assert query == ~s{SELECT 1 IN (?,?,?) FROM "schema" AS s0} + + query = Schema |> select([e], 1 in [1, ^2, 3]) |> parse() - query = Schema |> select([e], 1 in [1, e.x, 3]) |> normalize - assert SQL.all(query) == ~s{SELECT 1 IN (1,s0."x",3) FROM "schema" AS s0} + assert query == ~s{SELECT 1 IN (1,?,3) FROM "schema" AS s0} - query = Schema |> select([e], 1 in ^[]) |> normalize - assert SQL.all(query) == ~s{SELECT 0=1 FROM "schema" AS s0} + query = Schema |> select([e], ^1 in [1, ^2, 3]) |> parse() - query = Schema |> select([e], 1 in ^[1, 2, 3]) |> normalize - assert SQL.all(query) == ~s{SELECT 1 IN (?1,?2,?3) FROM "schema" AS s0} + assert query == ~s{SELECT ? IN (1,?,3) FROM "schema" AS s0} - query = Schema |> select([e], 1 in [1, ^2, 3]) |> normalize - assert SQL.all(query) == ~s{SELECT 1 IN (1,?1,3) FROM "schema" AS s0} + query = Schema |> select([e], ^1 in ^[1, 2, 3]) |> parse() - query = Schema |> select([e], ^1 in [1, ^2, 3]) |> normalize - assert SQL.all(query) == ~s{SELECT ?1 IN (1,?2,3) FROM "schema" AS s0} + assert query == ~s{SELECT ? IN (?,?,?) 
FROM "schema" AS s0} - query = Schema |> select([e], ^1 in ^[1, 2, 3]) |> normalize - assert SQL.all(query) == ~s{SELECT ?1 IN (?2,?3,?4) FROM "schema" AS s0} + query = Schema |> select([e], 1 in e.w) |> parse() + assert query == ~s{SELECT 1 = ANY(s0."w") FROM "schema" AS s0} - query = Schema |> select([e], 1 in e.w) |> normalize - assert SQL.all(query) == ~s{SELECT 1 = ANY(s0."w") FROM "schema" AS s0} + query = + Schema + |> select([e], 1 in fragment("foo")) + |> parse() - query = Schema |> select([e], 1 in fragment("foo")) |> normalize - assert SQL.all(query) == ~s{SELECT 1 = ANY(foo) FROM "schema" AS s0} + assert query == ~s{SELECT 1 = ANY(foo) FROM "schema" AS s0} end test "having" do - query = Schema |> having([p], p.x == p.x) |> select([], true) |> normalize + query = + Schema + |> having([p], p.x == p.x) + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 HAVING (s0."x" = s0."x")} query = @@ -408,17 +531,20 @@ defmodule MssqlEcto.SelectTest do |> having([p], p.x == p.x) |> having([p], p.y == p.y) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 HAVING (s0."x" = s0."x") AND (s0."y" = s0."y")} end test "or_having" do query = - Schema |> or_having([p], p.x == p.x) |> select([], true) |> normalize + Schema + |> or_having([p], p.x == p.x) + |> select([], true) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 HAVING (s0."x" = s0."x")} query = @@ -426,44 +552,70 @@ defmodule MssqlEcto.SelectTest do |> or_having([p], p.x == p.x) |> or_having([p], p.y == p.y) |> select([], true) - |> normalize + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT 'TRUE' FROM "schema" AS s0 HAVING (s0."x" = s0."x") OR (s0."y" = s0."y")} end test "group by" do - query = Schema |> group_by([r], r.x) |> select([r], r.x) |> normalize + query = + Schema + |> group_by([r], r.x) + |> select([r], r.x) + |> parse() - assert SQL.all(query) == + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 GROUP BY s0."x"} - query = Schema |> group_by([r], 2) |> select([r], r.x) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 GROUP BY 2} + query = + Schema + |> group_by([r], 2) + |> select([r], r.x) + |> parse() - query = Schema |> group_by([r], [r.x, r.y]) |> select([r], r.x) |> normalize + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 GROUP BY 2} - assert SQL.all(query) == + query = + Schema + |> group_by([r], [r.x, r.y]) + |> select([r], r.x) + |> parse() + + assert query == ~s{SELECT s0."x" FROM "schema" AS s0 GROUP BY s0."x", s0."y"} - query = Schema |> group_by([r], []) |> select([r], r.x) |> normalize - assert SQL.all(query) == ~s{SELECT s0."x" FROM "schema" AS s0} + query = + Schema + |> group_by([r], []) + |> select([r], r.x) + |> parse() + + assert query == ~s{SELECT s0."x" FROM "schema" AS s0} end test "arrays and sigils" do - query = Schema |> select([], fragment("?", [1, 2, 3])) |> normalize - assert SQL.all(query) == ~s{SELECT ARRAY[1,2,3] FROM "schema" AS s0} + query = + Schema + |> select([], fragment("?", [1, 2, 3])) + |> parse() + + assert query == ~s{SELECT ARRAY[1,2,3] FROM "schema" AS s0} + + query = + Schema + |> select([], fragment("?", ~w(abc def))) + |> parse() - query = Schema |> select([], fragment("?", ~w(abc def))) |> normalize - assert SQL.all(query) == ~s{SELECT ARRAY['abc','def'] FROM "schema" AS s0} + assert query == ~s{SELECT ARRAY['abc','def'] FROM "schema" 
   end

   test "interpolated values" do
     query =
       "schema"
       |> select([m], {m.id, ^true})
-      |> join(:inner, [], Schema2, fragment("?", ^true))
-      |> join(:inner, [], Schema2, fragment("?", ^false))
+      |> join(:inner, [], Schema2, on: fragment("?", ^true))
+      |> join(:inner, [], Schema2, on: fragment("?", ^false))
       |> where([], fragment("?", ^true))
       |> where([], fragment("?", ^false))
       |> having([], fragment("?", ^true))
@@ -474,62 +626,60 @@ defmodule MssqlEcto.SelectTest do
       |> order_by([], ^:x)
       |> limit([], ^4)
       |> offset([], ^5)
-      |> normalize
+      |> parse()

     result =
-      ~s/SELECT s0."id", ?1 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (?2) / <>
-        ~s/INNER JOIN "schema2" AS s2 ON (?3) WHERE (?4) AND (?5) / <>
-        ~s/GROUP BY ?6, ?7 HAVING (?8) AND (?9) / <>
-        ~s/ORDER BY ?10, s0."x" OFFSET ?12 ROWS FETCH NEXT ?11 ROWS ONLY/
+      ~s/SELECT s0."id", ? FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (?) / <>
+        ~s/INNER JOIN "schema2" AS s2 ON (?) WHERE (?) AND (?) / <>
+        ~s/GROUP BY ?, ? HAVING (?) AND (?) / <>
+        ~s/ORDER BY ?, s0."x" OFFSET ? ROWS FETCH NEXT ? ROWS ONLY/

-    assert SQL.all(query) == String.trim(result)
+    assert query == String.trim(result)
   end

   test "fragments and types" do
     query =
-      normalize(
-        from(
-          e in "schema",
-          where:
-            fragment(
-              "extract(? from ?) = ?",
-              ^"month",
-              e.start_time,
-              type(^"4", :integer)
-            ),
-          where:
-            fragment(
-              "extract(? from ?) = ?",
-              ^"year",
-              e.start_time,
-              type(^"2015", :integer)
-            ),
-          select: true
-        )
+      from(
+        e in "schema",
+        where:
+          fragment(
+            "extract(? from ?) = ?",
+            ^"month",
+            e.start_time,
+            type(^"4", :integer)
+          ),
+        where:
+          fragment(
+            "extract(? from ?) = ?",
+            ^"year",
+            e.start_time,
+            type(^"2015", :integer)
+          ),
+        select: true
       )
+      |> parse()

     result =
       "SELECT 'TRUE' FROM \"schema\" AS s0 " <>
-        "WHERE (extract(?1 from s0.\"start_time\") = CAST(?2 AS int)) " <>
-        "AND (extract(?3 from s0.\"start_time\") = CAST(?4 AS int))"
+        "WHERE (extract(? from s0.\"start_time\") = CAST(? AS int)) " <>
+        "AND (extract(? from s0.\"start_time\") = CAST(? AS int))"

-    assert SQL.all(query) == String.trim(result)
+    assert query == String.trim(result)
   end

   test "fragments allow ? to be escaped with backslash" do
     query =
-      normalize(
-        from(
-          e in "schema",
-          where: fragment("? = \"query\\?\"", e.start_time),
-          select: true
-        )
+      from(
+        e in "schema",
+        where: fragment("? = \"query\\?\"", e.start_time),
= \"query\\?\"", e.start_time), + select: true ) + |> parse() result = "SELECT 'TRUE' FROM \"schema\" AS s0 " <> "WHERE (s0.\"start_time\" = \"query?\")" - assert SQL.all(query) == String.trim(result) + assert query == String.trim(result) end end diff --git a/test/mssql_ecto/update_all_test.exs b/test/mssql_ecto/update_all_test.exs index b1d2c8b..2007468 100644 --- a/test/mssql_ecto/update_all_test.exs +++ b/test/mssql_ecto/update_all_test.exs @@ -52,37 +52,51 @@ defmodule MssqlEcto.UpdateAllTest do end test "update all" do - query = from(m in Schema, update: [set: [x: 0]]) |> normalize(:update_all) + query = + from(m in Schema, update: [set: [x: 0]]) + |> normalize(:update_all) + |> SQL.update_all() + |> IO.iodata_to_binary() - assert SQL.update_all(query) == + assert query == ~s{UPDATE s0 SET "x" = 0 FROM "schema" AS s0} query = from(m in Schema, update: [set: [x: 0], inc: [y: 1, z: -3]]) |> normalize(:update_all) + |> SQL.update_all() + |> IO.iodata_to_binary() - assert SQL.update_all(query) == + assert query == ~s{UPDATE s0 SET "x" = 0, "y" = s0."y" + 1, "z" = s0."z" + -3 FROM "schema" AS s0} query = from(e in Schema, where: e.x == 123, update: [set: [x: 0]]) |> normalize(:update_all) + |> SQL.update_all() + |> IO.iodata_to_binary() - assert SQL.update_all(query) == + assert query == ~s{UPDATE s0 SET "x" = 0 FROM "schema" AS s0 WHERE (s0."x" = 123)} - query = from(m in Schema, update: [set: [x: ^0]]) |> normalize(:update_all) + query = + from(m in Schema, update: [set: [x: ^0]]) + |> normalize(:update_all) + |> SQL.update_all() + |> IO.iodata_to_binary() - assert SQL.update_all(query) == - ~s{UPDATE s0 SET "x" = ?1 FROM "schema" AS s0} + assert query == + ~s{UPDATE s0 SET "x" = ? FROM "schema" AS s0} query = Schema - |> join(:inner, [p], q in Schema2, p.x == q.z) + |> join(:inner, [p], q in Schema2, on: p.x == q.z) |> update([_], set: [x: 0]) |> normalize(:update_all) + |> SQL.update_all() + |> IO.iodata_to_binary() - assert SQL.update_all(query) == + assert query == ~s{UPDATE s0 SET "x" = 0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 ON (s0."x" = s1."z")} query = @@ -94,8 +108,10 @@ defmodule MssqlEcto.UpdateAllTest do on: e.x == q.z ) |> normalize(:update_all) + |> SQL.update_all() + |> IO.iodata_to_binary() - assert SQL.update_all(query) == + assert query == ~s{UPDATE s0 SET "x" = 0 FROM "schema" AS s0 INNER JOIN "schema2" AS s1 } <> ~s{ON (s0."x" = s1."z") WHERE (s0."x" = 123)} end @@ -105,28 +121,44 @@ defmodule MssqlEcto.UpdateAllTest do from(m in Schema, update: [set: [x: 0]]) |> select([m], m) |> normalize(:update_all) + |> SQL.update_all() + |> IO.iodata_to_binary() - assert SQL.update_all(query) == + assert query == ~s{UPDATE s0 SET "x" = 0 OUTPUT INSERTED."id", INSERTED."x", INSERTED."y", INSERTED."z", INSERTED."w" FROM "schema" AS s0} end - @tag skip: "Arrays not supported" + # TODO why was this skipped? 
+  # @tag skip: "Arrays not supported"
   test "update all array ops" do
-    query = from(m in Schema, update: [push: [w: 0]]) |> normalize(:update_all)
+    query =
+      from(m in Schema, update: [push: [w: 0]])
+      |> normalize(:update_all)
+      |> SQL.update_all()
+      |> IO.iodata_to_binary()

-    assert SQL.update_all(query) ==
+    assert query ==
             ~s{UPDATE s0 SET "w" = array_append(s0."w", 0) FROM "schema" AS s0}

-    query = from(m in Schema, update: [pull: [w: 0]]) |> normalize(:update_all)
+    query =
+      from(m in Schema, update: [pull: [w: 0]])
+      |> normalize(:update_all)
+      |> SQL.update_all()
+      |> IO.iodata_to_binary()

-    assert SQL.update_all(query) ==
+    assert query ==
             ~s{UPDATE s0 SET "w" = array_remove(s0."w", 0) FROM "schema" AS s0}
   end

   test "update all with prefix" do
-    query = from(m in Schema, update: [set: [x: 0]]) |> normalize(:update_all)
+    query =
+      from(m in Schema, update: [set: [x: 0]])
+      |> normalize(:update_all)
+      |> Map.put(:prefix, "prefix")
+      |> SQL.update_all()
+      |> IO.iodata_to_binary()

-    assert SQL.update_all(%{query | prefix: "prefix"}) ==
+    assert query ==
             ~s{UPDATE s0 SET "x" = 0 FROM "prefix"."schema" AS s0}
   end
 end
diff --git a/test/mssql_ecto/update_test.exs b/test/mssql_ecto/update_test.exs
index 6e72b8c..1c6248e 100644
--- a/test/mssql_ecto/update_test.exs
+++ b/test/mssql_ecto/update_test.exs
@@ -2,17 +2,24 @@ defmodule MssqlEcto.UpdateTest do
   use MssqlEcto.Case, async: true

   test "update" do
-    query = SQL.update(nil, "schema", [:x, :y], [:id], [])
-    assert query == ~s{UPDATE "schema" SET "x" = ?1, "y" = ?2 WHERE "id" = ?3}
+    query =
+      SQL.update(nil, "schema", [:x, :y], [:id], [])
+      |> IO.iodata_to_binary()

-    query = SQL.update(nil, "schema", [:x, :y], [:id], [:z])
+    assert query == ~s{UPDATE "schema" SET "x" = ?, "y" = ? WHERE "id" = ?}
+
+    query =
+      SQL.update(nil, "schema", [:x, :y], [:id], [:z])
+      |> IO.iodata_to_binary()

     assert query ==
-             ~s{UPDATE "schema" SET "x" = ?1, "y" = ?2 OUTPUT INSERTED."z" WHERE "id" = ?3}
+             ~s{UPDATE "schema" SET "x" = ?, "y" = ? OUTPUT INSERTED."z" WHERE "id" = ?}

-    query = SQL.update("prefix", "schema", [:x, :y], [:id], [])
+    query =
+      SQL.update("prefix", "schema", [:x, :y], [:id], [])
+      |> IO.iodata_to_binary()

     assert query ==
-             ~s{UPDATE "prefix"."schema" SET "x" = ?1, "y" = ?2 WHERE "id" = ?3}
+             ~s{UPDATE "prefix"."schema" SET "x" = ?, "y" = ? WHERE "id" = ?}
WHERE "id" = ?} end end diff --git a/test/test_helper.exs b/test/test_helper.exs index 089bea2..3d81eaa 100644 --- a/test/test_helper.exs +++ b/test/test_helper.exs @@ -13,8 +13,7 @@ defmodule MssqlEcto.Case do end def normalize(query, operation \\ :all, counter \\ 0) do - {query, _params, _key} = - Ecto.Query.Planner.prepare(query, operation, MssqlEcto, counter) + {query, _params, _key} = Ecto.Query.Planner.plan(query, operation, MssqlEcto) case Ecto.Query.Planner.normalize(query, operation, MssqlEcto, counter) do # Ecto v2.2 onwards diff --git a/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/49_migration_49.exs b/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/49_migration_49.exs new file mode 100644 index 0000000..3058f45 --- /dev/null +++ b/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/49_migration_49.exs @@ -0,0 +1,11 @@ +defmodule Elixir.Ecto.Integration.MigratorTest.Migration49 do + use Ecto.Migration + + def up do + send :"test run down to/step migration", {:up, 49} + end + + def down do + send :"test run down to/step migration", {:down, 49} + end +end diff --git a/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/50_migration_50.exs b/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/50_migration_50.exs new file mode 100644 index 0000000..954b331 --- /dev/null +++ b/tmp/Elixir.Ecto.Integration.MigratorTest/test run down to/step migration/50_migration_50.exs @@ -0,0 +1,11 @@ +defmodule Elixir.Ecto.Integration.MigratorTest.Migration50 do + use Ecto.Migration + + def up do + send :"test run down to/step migration", {:up, 50} + end + + def down do + send :"test run down to/step migration", {:down, 50} + end +end