Dataset columns:
commit_message: string, length 9 to 4.28k
sha: string, length 40 to 40
type: string, 10 distinct values
commit_url: string, length 78 to 90
masked_commit_message: string, length 2 to 4.27k
author_email: string, 8 distinct values
git_diff: string, length 2 to 37.4M

commit_message: ci: disable doc publishing until geo blog can be fixed
sha: 989ad4f1e7a11b06af7e0e3ef840022687fbd7af
type: ci
commit_url: https://github.com/ibis-project/ibis/commit/989ad4f1e7a11b06af7e0e3ef840022687fbd7af
masked_commit_message: disable doc publishing until geo blog can be fixed
git_diff:
{"ibis-docs-main.yml": "@@ -51,7 +51,8 @@ jobs:\n - name: verify internal links\n run: nix develop --ignore-environment '.#links' -c just checklinks --offline --no-progress\n \n- - name: build and push quarto docs\n- run: nix develop --ignore-environment --keep NETLIFY_AUTH_TOKEN -c just docs-deploy\n- env:\n- NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}\n+ # TODO: re-enable when geo blog is fixed (to_array)\n+ # - name: build and push quarto docs\n+ # run: nix develop --ignore-environment --keep NETLIFY_AUTH_TOKEN -c just docs-deploy\n+ # env:\n+ # NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}\n"}

commit_message: test: remove unused `spread_type` function
sha: 527b7501e7b458d092baf18b6605f8a7f4595036
type: test
commit_url: https://github.com/ibis-project/ibis/commit/527b7501e7b458d092baf18b6605f8a7f4595036
masked_commit_message: remove unused `spread_type` function
git_diff:
{"datatypes.py": "@@ -65,21 +65,3 @@ class BigQuerySchema(SchemaMapper):\n @classmethod\n def to_ibis(cls, fields: list[bq.SchemaField]) -> sch.Schema:\n return sch.Schema({f.name: cls._dtype_from_bigquery_field(f) for f in fields})\n-\n-\n-# TODO(kszucs): we can eliminate this function by making dt.DataType traversible\n-# using ibis.common.graph.Node, similarly to how we traverse ops.Node instances:\n-# node.find(types)\n-def spread_type(dt: dt.DataType):\n- \"\"\"Returns a generator that contains all the types in the given type.\n-\n- For complex types like set and array, it returns the types of the elements.\n- \"\"\"\n- if dt.is_array():\n- yield from spread_type(dt.value_type)\n- elif dt.is_struct():\n- for type_ in dt.types:\n- yield from spread_type(type_)\n- elif dt.is_map():\n- raise NotImplementedError(\"Maps are not supported in BigQuery\")\n- yield dt\n", "test_datatypes.py": "@@ -5,10 +5,7 @@ import sqlglot as sg\n from pytest import param\n \n import ibis.expr.datatypes as dt\n-from ibis.backends.bigquery.datatypes import (\n- BigQueryType,\n- spread_type,\n-)\n+from ibis.backends.bigquery.datatypes import BigQueryType\n \n \n @pytest.mark.parametrize(\n@@ -79,31 +76,6 @@ def test_simple_failure_mode(datatype):\n BigQueryType.to_string(datatype)\n \n \[email protected](\n- (\"type_\", \"expected\"),\n- [\n- param(\n- dt.int64,\n- [dt.int64],\n- ),\n- param(\n- dt.Array(dt.int64),\n- [dt.int64, dt.Array(value_type=dt.int64)],\n- ),\n- param(\n- dt.Struct.from_tuples([(\"a\", dt.Array(dt.int64))]),\n- [\n- dt.int64,\n- dt.Array(value_type=dt.int64),\n- dt.Struct.from_tuples([(\"a\", dt.Array(value_type=dt.int64))]),\n- ],\n- ),\n- ],\n-)\n-def test_spread_type(type_, expected):\n- assert list(spread_type(type_)) == expected\n-\n-\n def test_struct_type():\n dtype = dt.Array(dt.int64)\n parsed_type = sg.parse_one(\"BIGINT[]\", into=sg.exp.DataType, read=\"duckdb\")\n"}

commit_message: chore(engine): removed fallbacks for rAF, it's useless
sha: d6151fe959532afc5228aa63fd963406d9de777d
type: chore
commit_url: https://github.com/tsparticles/tsparticles/commit/d6151fe959532afc5228aa63fd963406d9de777d
masked_commit_message: removed fallbacks for rAF, it's useless
git_diff:
{"Container.ts": "@@ -1,4 +1,3 @@\n-import { animate, cancelAnimation, isFunction } from \"../Utils/Utils\";\n import { Canvas } from \"./Canvas\";\n import type { ClickMode } from \"../Enums/Modes/ClickMode\";\n import type { Engine } from \"../engine\";\n@@ -17,6 +16,7 @@ import { Particles } from \"./Particles\";\n import { Retina } from \"./Retina\";\n import type { Vector } from \"./Utils/Vector\";\n import { getRangeValue } from \"../Utils/NumberUtils\";\n+import { isFunction } from \"../Utils/Utils\";\n import { loadOptions } from \"../Utils/OptionsUtils\";\n \n /**\n@@ -400,7 +400,7 @@ export class Container {\n \n let refreshTime = force;\n \n- this._drawAnimationFrame = animate()(async (timestamp) => {\n+ this._drawAnimationFrame = requestAnimationFrame(async (timestamp) => {\n if (refreshTime) {\n this.lastFrameTime = undefined;\n \n@@ -562,7 +562,7 @@ export class Container {\n }\n \n if (this._drawAnimationFrame !== undefined) {\n- cancelAnimation()(this._drawAnimationFrame);\n+ cancelAnimationFrame(this._drawAnimationFrame);\n \n delete this._drawAnimationFrame;\n }\n", "Utils.ts": "@@ -169,26 +169,6 @@ export function safeMatchMedia(query: string): MediaQueryList | undefined {\n return matchMedia(query);\n }\n \n-/**\n- * Calls the requestAnimationFrame function or a polyfill\n- * @returns the animation callback id, so it can be canceled\n- */\n-export function animate(): (callback: FrameRequestCallback) => number {\n- return isSsr()\n- ? (callback: FrameRequestCallback): number => setTimeout(callback)\n- : (callback: FrameRequestCallback): number => (requestAnimationFrame || setTimeout)(callback);\n-}\n-\n-/**\n- * Cancels the requestAnimationFrame function or a polyfill\n- * @returns the animation cancelling function\n- */\n-export function cancelAnimation(): (handle: number) => void {\n- return isSsr()\n- ? (handle: number): void => clearTimeout(handle)\n- : (handle: number): void => (cancelAnimationFrame || clearTimeout)(handle);\n-}\n-\n /**\n * Checks if a value is equal to the destination, if same type, or is in the provided array\n * @param value - the value to check\n"}

commit_message: docs: fix broken links from Semrush report (#7025)
sha: 39eebf622666fdf220f889850fe7e4981cd90d08
type: docs
commit_url: https://github.com/wzhiqing/cube/commit/39eebf622666fdf220f889850fe7e4981cd90d08
masked_commit_message: fix broken links from Semrush report (#7025)
git_diff:
{"AlertBox.tsx": "@@ -1,13 +1,13 @@\n-import React from 'react';\n-import classes from './AlertBox.module.css';\n-import classnames from 'classnames/bind';\n+import React from \"react\";\n+import classes from \"./AlertBox.module.css\";\n+import classnames from \"classnames/bind\";\n const cn = classnames.bind(classes);\n \n export enum AlertBoxTypes {\n- DANGER = 'danger',\n- INFO = 'info',\n- SUCCESS = 'success',\n- WARNING = 'warning',\n+ DANGER = \"danger\",\n+ INFO = \"info\",\n+ SUCCESS = \"success\",\n+ WARNING = \"warning\",\n }\n \n declare const TypeToEmoji: {\n@@ -19,55 +19,64 @@ declare const TypeToEmoji: {\n type CalloutType = keyof typeof TypeToEmoji;\n \n export type AlertBoxProps = {\n- children: string;\n+ children: React.ReactNode;\n heading?: string;\n type: AlertBoxTypes;\n-}\n+};\n \n const typeMapping: Record<AlertBoxTypes, CalloutType> = {\n- 'danger': 'error',\n- info: 'info',\n- warning: 'warning',\n- success: 'default',\n-}\n+ danger: \"error\",\n+ info: \"info\",\n+ warning: \"warning\",\n+ success: \"default\",\n+};\n \n const iconMapping: Record<string, any> = {\n- 'danger': '\ud83d\udeab',\n- info: '\u2139\ufe0f',\n- warning: '\u26a0\ufe0f',\n- success: '\u2705',\n+ danger: \"\ud83d\udeab\",\n+ info: \"\u2139\ufe0f\",\n+ warning: \"\u26a0\ufe0f\",\n+ success: \"\u2705\",\n };\n \n export const AlertBox = ({ children, heading, type }: AlertBoxProps) => {\n- const header = heading\n- ? (\n- <div className={classes.AlertBox__header}>\n- <span className={cn('AlertBox__HeaderIcon')}>{iconMapping[type]}</span>\n- {heading}\n- </div>\n- )\n- : null;\n+ const header = heading ? (\n+ <div className={classes.AlertBox__header}>\n+ <span className={cn(\"AlertBox__HeaderIcon\")}>{iconMapping[type]}</span>\n+ {heading}\n+ </div>\n+ ) : null;\n \n return (\n- <div className={cn('AlertBox__Wrapper', `AlertBox__Wrapper--${typeMapping[type]}`)}>\n+ <div\n+ className={cn(\n+ \"AlertBox__Wrapper\",\n+ `AlertBox__Wrapper--${typeMapping[type]}`\n+ )}\n+ >\n {header}\n- <div className={classes.AlertBox__content}>\n- {children}\n- </div>\n+ <div className={classes.AlertBox__content}>{children}</div>\n </div>\n- )\n-}\n+ );\n+};\n \n-export type AlertBoxSubclass = Omit<AlertBoxProps, 'type'>;\n+export type AlertBoxSubclass = Omit<AlertBoxProps, \"type\">;\n \n export type DangerBoxProps = AlertBoxSubclass;\n-export const DangerBox = (props: DangerBoxProps) => <AlertBox type={AlertBoxTypes.DANGER} {...props} />;\n+export const DangerBox = (props: DangerBoxProps) => (\n+ <AlertBox type={AlertBoxTypes.DANGER} {...props} />\n+);\n \n export type InfoBoxProps = AlertBoxSubclass;\n-export const InfoBox = (props: InfoBoxProps) => <AlertBox type={AlertBoxTypes.INFO} {...props} />;\n+export const InfoBox = (props: InfoBoxProps) => (\n+ <AlertBox type={AlertBoxTypes.INFO} {...props} />\n+);\n \n export type SuccessBoxProps = AlertBoxSubclass;\n-export const SuccessBox = (props: SuccessBoxProps) => <AlertBox type={AlertBoxTypes.SUCCESS} {...props} />;\n+export const SuccessBox = (props: SuccessBoxProps) => (\n+ <AlertBox type={AlertBoxTypes.SUCCESS} {...props} />\n+);\n \n export type WarningBoxProps = AlertBoxSubclass;\n-export const WarningBox = (props: WarningBoxProps) => <AlertBox type={AlertBoxTypes.WARNING} {...props} />;\n+export const WarningBox = (props: WarningBoxProps) => (\n+ <AlertBox type={AlertBoxTypes.WARNING} {...props} />\n+);\n", "CommunitySupportedDriver.tsx": "@@ -0,0 +1,20 @@\n+import { WarningBox } from \"@/components/mdx/AlertBox/AlertBox\";\n+import { Link } from 
\"@/components/overrides/Anchor/Link\";\n+\n+export interface CommunitySupportedDriverProps {\n+ dataSource: string;\n+}\n+\n+export const CommunitySupportedDriver = ({\n+ dataSource,\n+}: CommunitySupportedDriverProps) => {\n+ return (\n+ <WarningBox>\n+ The driver for {dataSource} is{\" \"}\n+ <Link href=\"/product/configuration/data-sources#driver-support\">\n+ community-supported\n+ </Link>{\" \"}\n+ and is not supported by Cube or the vendor.\n+ </WarningBox>\n+ );\n+};\n", "index.ts": "@@ -26,6 +26,7 @@ import { Table } from '@/components/overrides/Table/Table';\n import { Td } from '@/components/overrides/Table/Td';\n import { Th } from '@/components/overrides/Table/Th';\n import { Tr } from '@/components/overrides/Table/Tr';\n+import { CommunitySupportedDriver } from '@/components/mdx/Banners/CommunitySupportedDriver';\n \n export const components = {\n ...Buttons,\n@@ -54,6 +55,8 @@ export const components = {\n Diagram,\n YouTubeVideo,\n \n+ CommunitySupportedDriver,\n+\n // Overrides\n h1: H1,\n a: Link,\n", "real-time-data-fetch.mdx": "@@ -108,9 +108,9 @@ const Chart = ({ query }) => {\n ## Refresh Rate\n \n As in the case of a regular data fetch, real-time data fetch obeys\n-[`refresh_key` refresh rules](caching#refresh-keys). In order to provide a\n-desired refresh rate, `refresh_key` should reflect the rate of change of the\n-underlying data set; the querying time should also be much less than the desired\n-refresh rate. Please use the\n+[`refresh_key` refresh rules](/product/caching#refresh-keys). In order to\n+provide a desired refresh rate, `refresh_key` should reflect the rate of change\n+of the underlying data set; the querying time should also be much less than the\n+desired refresh rate. Please use the\n [`every`](/product/data-modeling/reference/cube#refresh_key) parameter to adjust\n the refresh interval.\n", "_meta.js": "@@ -7,7 +7,7 @@ module.exports = {\n \"elasticsearch\": \"Elasticsearch\",\n \"firebolt\": \"Firebolt\",\n \"google-bigquery\": \"Google BigQuery\",\n- \"hive\": \"Hive\",\n+ \"hive\": \"Hive / SparkSQL\",\n \"ksqldb\": \"ksqlDB\",\n \"materialize\": \"Materialize\",\n \"mongodb\": \"MongoDB\",\n", "druid.mdx": "@@ -5,11 +5,7 @@ redirect_from:\n \n # Druid\n \n-<WarningBox>\n- The driver for Druid is{\" \"}\n- <a href=\"../databases#driver-support\">community-supported</a> and is not\n- supported by Cube or the vendor.\n-</WarningBox>\n+<CommunitySupportedDriver dataSource=\"Druid\" />\n \n ## Prerequisites\n \n", "elasticsearch.mdx": "@@ -5,11 +5,7 @@ redirect_from:\n \n # Elasticsearch\n \n-<WarningBox>\n- The driver for Elasticsearch is{\" \"}\n- <a href=\"../databases#driver-support\">community-supported</a> and is not\n- supported by Cube or the vendor.\n-</WarningBox>\n+<CommunitySupportedDriver dataSource=\"Elasticsearch\" />\n \n ## Prerequisites\n \n", "hive.mdx": "@@ -3,13 +3,9 @@ redirect_from:\n - /config/databases/hive-sparksql\n ---\n \n-# Hive\n+# Hive / SparkSQL\n \n-<WarningBox>\n- The driver for Hive/SparkSQL is{\" \"}\n- <a href=\"../databases#driver-support\">community-supported</a> and is not\n- supported by Cube or the vendor.\n-</WarningBox>\n+<CommunitySupportedDriver dataSource=\"Hive / SparkSQL\" />\n \n ## Prerequisites\n \n", "mongodb.mdx": "@@ -5,11 +5,7 @@ redirect_from:\n \n # MongoDB\n \n-<WarningBox>\n- The driver for MongoDB is{\" \"}\n- <a href=\"../databases#driver-support\">community-supported</a> and is not\n- supported by Cube or the vendor.\n-</WarningBox>\n+<CommunitySupportedDriver dataSource=\"MongoDB\" 
/>\n \n ## Prerequisites\n \n", "oracle.mdx": "@@ -5,11 +5,7 @@ redirect_from:\n \n # Oracle\n \n-<WarningBox>\n- The driver for Oracle is{\" \"}\n- <a href=\"../databases#driver-support\">community-supported</a> and is not\n- supported by Cube or the vendor.\n-</WarningBox>\n+<CommunitySupportedDriver dataSource=\"Oracle\" />\n \n ## Prerequisites\n \n", "sqlite.mdx": "@@ -5,11 +5,7 @@ redirect_from:\n \n # SQLite\n \n-<WarningBox>\n- The driver for SQLite is{\" \"}\n- <a href=\"../databases#driver-support\">community-supported</a> and is not\n- supported by Cube or the vendor.\n-</WarningBox>\n+<CommunitySupportedDriver dataSource=\"SQLite\" />\n \n ## Prerequisites\n \n", "visualization-tools.mdx": "@@ -135,17 +135,17 @@ Cube provides integration libraries for popular front-end frameworks:\n \n <Grid imageSize={[56, 56]}>\n <GridItem\n- url=\"../frontend-introduction/react\"\n+ url=\"/product/apis-integrations/javascript-sdk/react\"\n imageUrl=\"https://static.cube.dev/icons/react.svg\"\n title=\"React\"\n />\n <GridItem\n- url=\"../frontend-introduction/vue\"\n+ url=\"/product/apis-integrations/javascript-sdk/vue\"\n imageUrl=\"https://static.cube.dev/icons/vue.svg\"\n title=\"Vue\"\n />\n <GridItem\n- url=\"../frontend-introduction/angular\"\n+ url=\"/product/apis-integrations/javascript-sdk/angular\"\n imageUrl=\"https://static.cube.dev/icons/angular.svg\"\n title=\"Angular\"\n />\n@@ -159,17 +159,17 @@ out REST and GraphQL APIs.\n \n <Grid imageSize={[56, 56]}>\n <GridItem\n- url=\"../backend/sql\"\n+ url=\"/product/apis-integrations/sql-api\"\n imageUrl=\"https://raw.githubusercontent.com/cube-js/cube.js/master/docs/static/icons/sql.svg\"\n title=\"SQL API\"\n />\n <GridItem\n- url=\"../rest-api\"\n+ url=\"/product/apis-integrations/rest-api\"\n imageUrl=\"https://raw.githubusercontent.com/cube-js/cube.js/master/docs/static/icons/rest.svg\"\n title=\"REST API\"\n />\n <GridItem\n- url=\"../backend/graphql\"\n+ url=\"/product/apis-integrations/graphql-api\"\n imageUrl=\"https://raw.githubusercontent.com/cube-js/cube.js/master/docs/static/icons/graphql.svg\"\n title=\"GraphQL API\"\n />\n", "observable.mdx": "@@ -211,4 +211,4 @@ You can also create a visualization of the executed REST API request.\n \n [ref-getting-started]: /product/getting-started/cloud\n [ref-sql-api]: /product/apis-integrations/sql-api\n-[ref-rest-api]: /backend/rest-api\n+[ref-rest-api]: /product/apis-integrations/rest-api\n", "concepts.mdx": "@@ -536,7 +536,8 @@ Pre-Aggregations][ref-caching-preaggs-intro].\n /product/data-modeling/reference/joins#relationship\n [ref-schema-ref-sql]: /product/data-modeling/reference/cube#sql\n [ref-schema-ref-sql-table]: /product/data-modeling/reference/cube#sql_table\n-[ref-tutorial-incremental-preagg]: /incremental-pre-aggregations\n+[ref-tutorial-incremental-preagg]:\n+ /product/data-modeling/reference/pre-aggregations#incremental\n [self-dimensions]: #dimensions\n [self-measures]: #measures\n [wiki-olap]: https://en.wikipedia.org/wiki/Online_analytical_processing\n", "learn-more.mdx": "@@ -24,7 +24,7 @@ Cube can be queried in a variety of ways. 
Explore how to use\n \n ## Caching\n \n-Learn more about the [two-level cache](/docs/caching) and how\n+Learn more about the [two-level cache](/product/caching) and how\n [pre-aggregations help speed up queries](/product/caching/getting-started-pre-aggregations).\n For a deeper dive, take a look at the\n [related recipes](/guides/recipes/overview#recipes-query-acceleration).\n", "integrations.mdx": "@@ -38,12 +38,12 @@ following guides and configuration examples to get tool-specific instructions:\n \n <Grid imageSize={[56, 56]}>\n <GridItem\n- url=\"datadog\"\n+ url=\"integrations/datadog\"\n imageUrl=\"https://static.cube.dev/icons/datadog.svg\"\n title=\"Datadog\"\n />\n <GridItem\n- url=\"grafana-cloud\"\n+ url=\"integrations/grafana-cloud\"\n imageUrl=\"https://static.cube.dev/icons/grafana.svg\"\n title=\"Grafana Cloud\"\n />\n", "index.d.ts": "@@ -116,12 +116,12 @@ declare module '@cubejs-client/react' {\n \n type QueryRendererProps = {\n /**\n- * Analytic query. [Learn more about it's format](query-format)\n+ * Analytic query. [Learn more about it's format](/product/apis-integrations/rest-api/query-format)\n */\n query: Query | Query[];\n queries?: { [key: string]: Query };\n /**\n- * Indicates whether the generated by `Cube.js` SQL Code should be requested. See [rest-api#sql](rest-api#api-reference-v-1-sql). When set to `only` then only the request to [/v1/sql](rest-api#api-reference-v-1-sql) will be performed. When set to `true` the sql request will be performed along with the query request. Will not be performed if set to `false`\n+ * Indicates whether the generated by `Cube.js` SQL Code should be requested. See [rest-api#sql](/reference/rest-api#v1sql). When set to `only` then only the request to [/v1/sql](/reference/rest-api#v1sql) will be performed. When set to `true` the sql request will be performed along with the query request. Will not be performed if set to `false`\n */\n loadSql?: 'only' | boolean;\n /**\n@@ -459,7 +459,7 @@ declare module '@cubejs-client/react' {\n */\n skip?: boolean;\n /**\n- * Use continuous fetch behavior. See [Real-Time Data Fetch](real-time-data-fetch)\n+ * Use continuous fetch behavior. See [Real-Time Data Fetch](/product/apis-integrations/rest-api/real-time-data-fetch)\n */\n subscribe?: boolean;\n /**\n"}

commit_message: feat: octal Debug representation of `tree::EntryMode`. This makes it easier to reason about.
sha: cd61c25369d3e39b6160bac4b332b177dabddf4b
type: feat
commit_url: https://github.com/Byron/gitoxide/commit/cd61c25369d3e39b6160bac4b332b177dabddf4b
masked_commit_message: octal Debug representation of `tree::EntryMode`. This makes it easier to reason about.
git_diff:
{"tree_with_rewrites.rs": "@@ -14,25 +14,19 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {\n Addition {\n location: \"a\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n location: \"b\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n location: \"d\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -42,9 +36,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(587ff082e0b98914788500eae5dd6a33f04883c9),\n },\n Addition {\n@@ -54,9 +46,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n ]\n@@ -76,25 +66,19 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {\n Addition {\n location: \"a\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n location: \"b\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n location: \"d\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -104,9 +88,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(587ff082e0b98914788500eae5dd6a33f04883c9),\n },\n Addition {\n@@ -116,9 +98,7 @@ fn empty_to_new_tree_without_rename_tracking() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n ]\n@@ -153,35 +133,23 @@ fn changes_against_modified_tree_with_filename_tracking() -> crate::Result {\n [\n Modification {\n location: \"a\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(78981922613b2afb6025042ff6bd878ac1994e85),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(b4f17b61de71d9b2e54ac9e62b1629ae2d97a6a7),\n },\n Modification {\n location: \"dir\",\n- previous_entry_mode: EntryMode(\n- 16384,\n- ),\n+ previous_entry_mode: EntryMode(0o40000),\n previous_id: Sha1(e5c63aefe4327cb1c780c71966b678ce8e4225da),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(c7ac5f82f536976f3561c9999b5f11e5893358be),\n },\n Modification {\n location: \"dir/c\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(6695780ceb14b05e076a99bbd2babf34723b3464),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),\n },\n ]\n@@ -198,35 +166,23 @@ fn changes_against_modified_tree_with_filename_tracking() -> crate::Result {\n [\n 
Modification {\n location: \"a\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(78981922613b2afb6025042ff6bd878ac1994e85),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(b4f17b61de71d9b2e54ac9e62b1629ae2d97a6a7),\n },\n Modification {\n location: \"dir\",\n- previous_entry_mode: EntryMode(\n- 16384,\n- ),\n+ previous_entry_mode: EntryMode(0o40000),\n previous_id: Sha1(e5c63aefe4327cb1c780c71966b678ce8e4225da),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(c7ac5f82f536976f3561c9999b5f11e5893358be),\n },\n Modification {\n location: \"c\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(6695780ceb14b05e076a99bbd2babf34723b3464),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),\n },\n ]\n@@ -340,40 +296,28 @@ fn rename_by_similarity() -> crate::Result {\n [\n Modification {\n location: \"b\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(61780798228d17af2d34fce4cfbdf35556832472),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),\n },\n Modification {\n location: \"dir\",\n- previous_entry_mode: EntryMode(\n- 16384,\n- ),\n+ previous_entry_mode: EntryMode(0o40000),\n previous_id: Sha1(d1622e275dbb2cb3215a0bdcd2fc77273891f360),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(6602e61ea053525e4907e155c0b3da3a269e1385),\n },\n Deletion {\n location: \"dir/c\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),\n },\n Addition {\n location: \"dir/c-moved\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(f01e8ddf5adc56985b9a1cda6d7c7ef9e3abe034),\n },\n ]\n@@ -404,31 +348,21 @@ fn rename_by_similarity() -> crate::Result {\n [\n Modification {\n location: \"b\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(61780798228d17af2d34fce4cfbdf35556832472),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),\n },\n Modification {\n location: \"dir\",\n- previous_entry_mode: EntryMode(\n- 16384,\n- ),\n+ previous_entry_mode: EntryMode(0o40000),\n previous_id: Sha1(d1622e275dbb2cb3215a0bdcd2fc77273891f360),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(6602e61ea053525e4907e155c0b3da3a269e1385),\n },\n Rewrite {\n source_location: \"dir/c\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(40006fcef15a8853a1b7ae186d93b7d680fd29cf),\n diff: Some(\n@@ -440,9 +374,7 @@ fn rename_by_similarity() -> crate::Result {\n similarity: 0.65,\n },\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(f01e8ddf5adc56985b9a1cda6d7c7ef9e3abe034),\n location: \"dir/c-moved\",\n relation: None,\n@@ -508,26 +440,18 @@ fn copies_by_identity() -> crate::Result {\n [\n Modification {\n location: \"dir\",\n- previous_entry_mode: EntryMode(\n- 16384,\n- ),\n+ previous_entry_mode: 
EntryMode(0o40000),\n previous_id: Sha1(6602e61ea053525e4907e155c0b3da3a269e1385),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(f01fd5b4d733a4ae749cbb58a828cdb3f342f298),\n },\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(f00c965d8307308469e537302baa73048488f162),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(f00c965d8307308469e537302baa73048488f162),\n location: \"c1\",\n relation: None,\n@@ -535,15 +459,11 @@ fn copies_by_identity() -> crate::Result {\n },\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(f00c965d8307308469e537302baa73048488f162),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(f00c965d8307308469e537302baa73048488f162),\n location: \"c2\",\n relation: None,\n@@ -551,15 +471,11 @@ fn copies_by_identity() -> crate::Result {\n },\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(f00c965d8307308469e537302baa73048488f162),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(f00c965d8307308469e537302baa73048488f162),\n location: \"dir/c3\",\n relation: None,\n@@ -592,26 +508,18 @@ fn copies_by_similarity() -> crate::Result {\n [\n Modification {\n location: \"dir\",\n- previous_entry_mode: EntryMode(\n- 16384,\n- ),\n+ previous_entry_mode: EntryMode(0o40000),\n previous_id: Sha1(f01fd5b4d733a4ae749cbb58a828cdb3f342f298),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(1d7e20e07562a54af0408fd2669b0c56a6faa6f0),\n },\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n location: \"c4\",\n relation: None,\n@@ -619,9 +527,7 @@ fn copies_by_similarity() -> crate::Result {\n },\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n diff: Some(\n@@ -633,9 +539,7 @@ fn copies_by_similarity() -> crate::Result {\n similarity: 0.8888889,\n },\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(08fe19ca4d2f79624f35333157d610811efc1aed),\n location: \"c5\",\n relation: None,\n@@ -643,9 +547,7 @@ fn copies_by_similarity() -> crate::Result {\n },\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n diff: Some(\n@@ -657,9 +559,7 @@ fn copies_by_similarity() -> crate::Result {\n similarity: 0.8888889,\n },\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),\n location: \"dir/c6\",\n relation: None,\n@@ -729,15 +629,11 
@@ fn copies_in_entire_tree_by_similarity() -> crate::Result {\n [\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n location: \"c6\",\n relation: None,\n@@ -745,15 +641,11 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {\n },\n Rewrite {\n source_location: \"dir/c6\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),\n location: \"c7\",\n relation: None,\n@@ -761,9 +653,7 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {\n },\n Rewrite {\n source_location: \"c5\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(08fe19ca4d2f79624f35333157d610811efc1aed),\n diff: Some(\n@@ -775,9 +665,7 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {\n similarity: 0.75,\n },\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(97b3d1a5707f8a11fa5fa8bc6c3bd7b3965601fd),\n location: \"newly-added\",\n relation: None,\n@@ -785,13 +673,9 @@ fn copies_in_entire_tree_by_similarity() -> crate::Result {\n },\n Modification {\n location: \"b\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(f198d0640214092732566fb00543163845c8252c),\n },\n ]\n@@ -828,15 +712,11 @@ fn copies_in_entire_tree_by_similarity_with_limit() -> crate::Result {\n [\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n location: \"c6\",\n relation: None,\n@@ -844,15 +724,11 @@ fn copies_in_entire_tree_by_similarity_with_limit() -> crate::Result {\n },\n Rewrite {\n source_location: \"dir/c6\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),\n location: \"c7\",\n relation: None,\n@@ -860,21 +736,15 @@ fn copies_in_entire_tree_by_similarity_with_limit() -> crate::Result {\n },\n Modification {\n location: \"b\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(54781fa52cf133fa9d0bf59cfe2ef2621b5ad29f),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(f198d0640214092732566fb00543163845c8252c),\n },\n Addition {\n location: \"newly-added\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: 
EntryMode(0o100644),\n id: Sha1(97b3d1a5707f8a11fa5fa8bc6c3bd7b3965601fd),\n },\n ]\n@@ -910,26 +780,18 @@ fn copies_by_similarity_with_limit() -> crate::Result {\n [\n Modification {\n location: \"dir\",\n- previous_entry_mode: EntryMode(\n- 16384,\n- ),\n+ previous_entry_mode: EntryMode(0o40000),\n previous_id: Sha1(f01fd5b4d733a4ae749cbb58a828cdb3f342f298),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(1d7e20e07562a54af0408fd2669b0c56a6faa6f0),\n },\n Rewrite {\n source_location: \"base\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(3bb459b831ea471b9cd1cbb7c6d54a74251a711b),\n location: \"c4\",\n relation: None,\n@@ -938,17 +800,13 @@ fn copies_by_similarity_with_limit() -> crate::Result {\n Addition {\n location: \"c5\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(08fe19ca4d2f79624f35333157d610811efc1aed),\n },\n Addition {\n location: \"dir/c6\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(cf7a729ca69bfabd0995fc9b083e86a18215bd91),\n },\n ]\n@@ -984,15 +842,11 @@ fn realistic_renames_by_identity() -> crate::Result {\n [\n Rewrite {\n source_location: \"git-index/src/file.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"git-index/src/file/mod.rs\",\n relation: None,\n@@ -1001,20 +855,14 @@ fn realistic_renames_by_identity() -> crate::Result {\n Addition {\n location: \"git-index/tests/index/file/access.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Modification {\n location: \"git-index/tests/index/file/mod.rs\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(8ba3a16384aacc37d01564b28401755ce8053f51),\n },\n ]\n@@ -1070,36 +918,26 @@ fn realistic_renames_disabled() -> crate::Result {\n Deletion {\n location: \"git-index/src/file.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n location: \"git-index/src/file/mod.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n location: \"git-index/tests/index/file/access.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Modification {\n location: \"git-index/tests/index/file/mod.rs\",\n- previous_entry_mode: EntryMode(\n- 33188,\n- ),\n+ previous_entry_mode: EntryMode(0o100644),\n previous_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ 
entry_mode: EntryMode(0o100644),\n id: Sha1(8ba3a16384aacc37d01564b28401755ce8053f51),\n },\n ]\n@@ -1161,9 +999,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(0026010e87631065a2739f627622feb14f903fd4),\n },\n Addition {\n@@ -1173,9 +1009,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(0026010e87631065a2739f627622feb14f903fd4),\n },\n Deletion {\n@@ -1185,9 +1019,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n@@ -1197,9 +1029,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1209,9 +1039,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1221,9 +1049,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n@@ -1233,9 +1059,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n@@ -1245,9 +1069,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n@@ -1257,9 +1079,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n@@ -1269,9 +1089,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n@@ -1281,9 +1099,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1293,9 +1109,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1305,9 +1119,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1317,9 +1129,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1329,9 +1139,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n 
),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1341,9 +1149,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n@@ -1353,9 +1159,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 1,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n@@ -1365,9 +1169,7 @@ fn realistic_renames_disabled_2() -> crate::Result {\n 2,\n ),\n ),\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n ]\n@@ -1456,33 +1258,25 @@ fn realistic_renames_disabled_3() -> crate::Result {\n Addition {\n location: \"src/ein.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Addition {\n location: \"src/gix.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n location: \"src/plumbing-cli.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n Deletion {\n location: \"src/porcelain-cli.rs\",\n relation: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n },\n ]\n@@ -1539,15 +1333,11 @@ fn realistic_renames_by_identity_3() -> crate::Result {\n [\n Rewrite {\n source_location: \"src/plumbing-cli.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"src/ein.rs\",\n relation: None,\n@@ -1555,15 +1345,11 @@ fn realistic_renames_by_identity_3() -> crate::Result {\n },\n Rewrite {\n source_location: \"src/porcelain-cli.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: None,\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"src/gix.rs\",\n relation: None,\n@@ -1629,9 +1415,7 @@ fn realistic_renames_2() -> crate::Result {\n [\n Rewrite {\n source_location: \"git-sec\",\n- source_entry_mode: EntryMode(\n- 16384,\n- ),\n+ source_entry_mode: EntryMode(0o40000),\n source_relation: Some(\n Parent(\n 1,\n@@ -1639,9 +1423,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(0026010e87631065a2739f627622feb14f903fd4),\n diff: None,\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: EntryMode(0o40000),\n id: Sha1(0026010e87631065a2739f627622feb14f903fd4),\n location: \"gix-sec\",\n relation: Some(\n@@ -1653,9 +1435,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: \"git-sec/CHANGELOG.md\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: 
EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1663,9 +1443,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/CHANGELOG.md\",\n relation: Some(\n@@ -1677,9 +1455,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: \"git-sec/Cargo.toml\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1687,9 +1463,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/Cargo.toml\",\n relation: Some(\n@@ -1701,9 +1475,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: \"git-sec/src/identity.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1711,9 +1483,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/src/identity.rs\",\n relation: Some(\n@@ -1725,9 +1495,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: \"git-sec/src/lib.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1735,9 +1503,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/src/lib.rs\",\n relation: Some(\n@@ -1749,9 +1515,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: \"git-sec/src/permission.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1759,9 +1523,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/src/permission.rs\",\n relation: Some(\n@@ -1773,9 +1535,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: \"git-sec/src/trust.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1783,9 +1543,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/src/trust.rs\",\n relation: Some(\n@@ -1797,9 +1555,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: 
\"git-sec/tests/sec.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1807,9 +1563,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/tests/sec.rs\",\n relation: Some(\n@@ -1821,9 +1575,7 @@ fn realistic_renames_2() -> crate::Result {\n },\n Rewrite {\n source_location: \"git-sec/tests/identity/mod.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 1,\n@@ -1831,9 +1583,7 @@ fn realistic_renames_2() -> crate::Result {\n ),\n source_id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(e69de29bb2d1d6434b8b29ae775ad8c2e48c5391),\n location: \"gix-sec/tests/identity/mod.rs\",\n relation: Some(\n@@ -1927,9 +1677,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n [\n Rewrite {\n source_location: \"src/plumbing/options.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 2,\n@@ -1937,9 +1685,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n ),\n source_id: Sha1(00750edc07d6415dcc07ae0351e9397b0222b7ba),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(00750edc07d6415dcc07ae0351e9397b0222b7ba),\n location: \"src/plumbing-renamed/options/mod.rs\",\n relation: Some(\n@@ -1951,9 +1697,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n },\n Rewrite {\n source_location: \"src/plumbing/mod.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 2,\n@@ -1961,9 +1705,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n ),\n source_id: Sha1(0cfbf08886fca9a91cb753ec8734c84fcbe52c9f),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(0cfbf08886fca9a91cb753ec8734c84fcbe52c9f),\n location: \"src/plumbing-renamed/mod.rs\",\n relation: Some(\n@@ -1975,9 +1717,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n },\n Rewrite {\n source_location: \"src/plumbing/main.rs\",\n- source_entry_mode: EntryMode(\n- 33188,\n- ),\n+ source_entry_mode: EntryMode(0o100644),\n source_relation: Some(\n ChildOfParent(\n 2,\n@@ -1985,9 +1725,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n ),\n source_id: Sha1(d00491fd7e5bb6fa28c517a0bb32b8b506539d4d),\n diff: None,\n- entry_mode: EntryMode(\n- 33188,\n- ),\n+ entry_mode: EntryMode(0o100644),\n id: Sha1(d00491fd7e5bb6fa28c517a0bb32b8b506539d4d),\n location: \"src/plumbing-renamed/main.rs\",\n relation: Some(\n@@ -1999,9 +1737,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n },\n Rewrite {\n source_location: \"src/plumbing\",\n- source_entry_mode: EntryMode(\n- 16384,\n- ),\n+ source_entry_mode: EntryMode(0o40000),\n source_relation: Some(\n Parent(\n 2,\n@@ -2009,9 +1745,7 @@ fn realistic_renames_3_without_identity() -> crate::Result {\n ),\n source_id: Sha1(b9d41dcdbd92fcab2fb6594d04f2ad99b3472621),\n diff: None,\n- entry_mode: EntryMode(\n- 16384,\n- ),\n+ entry_mode: 
EntryMode(0o40000),\n id: Sha1(202702465d7bb291153629dc2e8b353afe9cbdae),\n location: \"src/plumbing-renamed\",\n relation: Some(\n", "mod.rs": "@@ -42,10 +42,16 @@ pub struct Editor<'a> {\n ///\n /// Note that even though it can be created from any `u16`, it should be preferable to\n /// create it by converting [`EntryKind`] into `EntryMode`.\n-#[derive(Clone, Copy, PartialEq, Eq, Debug, Ord, PartialOrd, Hash)]\n+#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]\n #[cfg_attr(feature = \"serde\", derive(serde::Serialize, serde::Deserialize))]\n pub struct EntryMode(pub u16);\n \n+impl std::fmt::Debug for EntryMode {\n+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n+ write!(f, \"EntryMode({:#o})\", self.0)\n+ }\n+}\n+\n /// A discretized version of ideal and valid values for entry modes.\n ///\n /// Note that even though it can represent every valid [mode](EntryMode), it might\n"}

commit_message: chore: mark query condition keys ('id:in' etc) as deprecated
sha: 7248762178d3c0386e78a72fd5e4cfa9701de882
type: chore
commit_url: https://github.com/mikro-orm/mikro-orm/commit/7248762178d3c0386e78a72fd5e4cfa9701de882
masked_commit_message: mark query condition keys ('id:in' etc) as deprecated
git_diff:
{"query-conditions.md": "@@ -35,6 +35,8 @@ const res = await orm.em.find(Author, {\n \n Another way to do this by including the operator in your keys:\n \n+> This approach is deprecated and will be removed in future versions.\n+\n ```typescript\n const res = await orm.em.find(Author, { $and: [\n { 'id:in': [1, 2, 7] },\n"}

commit_message: build(docker): simplify risingwave docker setup (#8126) Remove risingwave-specific minio service in favor of existing minio service.
sha: f2ff173c1467c5921edfb0ac9790ff8b0340bfc9
type: build
commit_url: https://github.com/rohankumardubey/ibis/commit/f2ff173c1467c5921edfb0ac9790ff8b0340bfc9
masked_commit_message: simplify risingwave docker setup (#8126) Remove risingwave-specific minio service in favor of existing minio service.
git_diff:
{"compose.yaml": "@@ -104,9 +104,10 @@ services:\n retries: 20\n test:\n - CMD-SHELL\n- - mc ready data && mc mb --ignore-existing data/trino\n+ - mc ready data && mc mb --ignore-existing data/trino data/risingwave\n networks:\n - trino\n+ - risingwave\n volumes:\n - $PWD/docker/minio/config.json:/.mc/config.json:ro\n \n@@ -537,74 +538,26 @@ services:\n networks:\n - impala\n \n- risingwave-minio:\n- image: \"quay.io/minio/minio:latest\"\n- command:\n- - server\n- - \"--address\"\n- - \"0.0.0.0:9301\"\n- - \"--console-address\"\n- - \"0.0.0.0:9400\"\n- - /data\n- expose:\n- - \"9301\"\n- - \"9400\"\n- ports:\n- - \"9301:9301\"\n- - \"9400:9400\"\n- depends_on: []\n- volumes:\n- - \"risingwave-minio:/data\"\n- entrypoint: /bin/sh -c \"set -e; mkdir -p \\\"/data/hummock001\\\"; /usr/bin/docker-entrypoint.sh \\\"$$0\\\" \\\"$$@\\\" \"\n- environment:\n- MINIO_CI_CD: \"1\"\n- MINIO_ROOT_PASSWORD: hummockadmin\n- MINIO_ROOT_USER: hummockadmin\n- MINIO_DOMAIN: \"risingwave-minio\"\n- container_name: risingwave-minio\n- healthcheck:\n- test:\n- - CMD-SHELL\n- - bash -c 'printf \\\"GET / HTTP/1.1\\n\\n\\\" > /dev/tcp/127.0.0.1/9301; exit $$?;'\n- interval: 5s\n- timeout: 5s\n- retries: 20\n- restart: always\n- networks:\n- - risingwave\n-\n risingwave:\n image: ghcr.io/risingwavelabs/risingwave:nightly-20240122\n command: \"standalone --meta-opts=\\\" \\\n --advertise-addr 0.0.0.0:5690 \\\n --backend mem \\\n- --state-store hummock+minio://hummockadmin:hummockadmin@risingwave-minio:9301/hummock001 \\\n- --data-directory hummock_001 \\\n- --config-path /risingwave.toml\\\" \\\n- --compute-opts=\\\" \\\n- --config-path /risingwave.toml \\\n- --advertise-addr 0.0.0.0:5688 \\\n- --role both \\\" \\\n- --frontend-opts=\\\" \\\n- --config-path /risingwave.toml \\\n- --listen-addr 0.0.0.0:4566 \\\n- --advertise-addr 0.0.0.0:4566 \\\" \\\n- --compactor-opts=\\\" \\\n- --advertise-addr 0.0.0.0:6660 \\\"\"\n- expose:\n- - \"4566\"\n+ --state-store hummock+minio://accesskey:secretkey@minio:9000/risingwave \\\n+ --data-directory hummock_001\\\" \\\n+ --compute-opts=\\\"--advertise-addr 0.0.0.0:5688 --role both\\\" \\\n+ --frontend-opts=\\\"--listen-addr 0.0.0.0:4566 --advertise-addr 0.0.0.0:4566\\\" \\\n+ --compactor-opts=\\\"--advertise-addr 0.0.0.0:6660\\\"\"\n ports:\n- - \"4566:4566\"\n+ - 4566:4566\n depends_on:\n- - risingwave-minio\n+ minio:\n+ condition: service_healthy\n volumes:\n- - \"./docker/risingwave/risingwave.toml:/risingwave.toml\"\n - risingwave:/data\n environment:\n RUST_BACKTRACE: \"1\"\n- # If ENABLE_TELEMETRY is not set, telemetry will start by default\n- ENABLE_TELEMETRY: ${ENABLE_TELEMETRY:-true}\n- container_name: risingwave\n+ ENABLE_TELEMETRY: \"false\"\n healthcheck:\n test:\n - CMD-SHELL\n@@ -612,10 +565,9 @@ services:\n - bash -c 'printf \\\"GET / HTTP/1.1\\n\\n\\\" > /dev/tcp/127.0.0.1/5688; exit $$?;'\n - bash -c 'printf \\\"GET / HTTP/1.1\\n\\n\\\" > /dev/tcp/127.0.0.1/4566; exit $$?;'\n - bash -c 'printf \\\"GET / HTTP/1.1\\n\\n\\\" > /dev/tcp/127.0.0.1/5690; exit $$?;'\n- interval: 5s\n- timeout: 5s\n+ interval: 1s\n retries: 20\n- restart: always\n+ restart: on-failure\n networks:\n - risingwave\n \n@@ -646,5 +598,4 @@ volumes:\n postgres:\n exasol:\n impala:\n- risingwave-minio:\n risingwave:\n", "risingwave.toml": "@@ -1,2 +0,0 @@\n-# RisingWave config file to be mounted into the Docker containers.\n-# See https://github.com/risingwavelabs/risingwave/blob/main/src/config/example.toml for example\n", "test_json.py": "@@ -41,8 +41,7 @@ pytestmark = [\n 
reason=\"https://github.com/ibis-project/ibis/pull/6920#discussion_r1373212503\",\n )\n @pytest.mark.broken(\n- [\"risingwave\"],\n- reason=\"TODO(Kexiang): order mismatch in array\",\n+ [\"risingwave\"], reason=\"TODO(Kexiang): order mismatch in array\", strict=False\n )\n def test_json_getitem(json_t, expr_fn, expected):\n expr = expr_fn(json_t)\n"}

commit_message: test: dont export entities from single file tests
sha: c49db6414b6b6416c16d0d0590e43bbf1162f0a7
type: test
commit_url: https://github.com/mikro-orm/mikro-orm/commit/c49db6414b6b6416c16d0d0590e43bbf1162f0a7
masked_commit_message: dont export entities from single file tests
git_diff:
{"custom-pivot-entity-auto-discovery.sqlite.test.ts": "@@ -13,7 +13,7 @@ import {\n import { SqliteDriver } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Order {\n+class Order {\n \n @PrimaryKey()\n id!: number;\n@@ -33,7 +33,7 @@ export class Order {\n }\n \n @Entity()\n-export class Product {\n+class Product {\n \n @PrimaryKey()\n id!: number;\n@@ -55,7 +55,7 @@ export class Product {\n }\n \n @Entity()\n-export class OrderItem {\n+class OrderItem {\n \n [OptionalProps]?: 'amount';\n \n", "GH725.test.ts": "@@ -1,5 +1,4 @@\n import { EntitySchema, MikroORM, sql, Type, ValidationError } from '@mikro-orm/core';\n-import type { AbstractSqlDriver } from '@mikro-orm/knex';\n import { SqliteDriver } from '@mikro-orm/sqlite';\n import { PostgreSqlDriver } from '@mikro-orm/postgresql';\n \n@@ -105,13 +104,12 @@ export const TestSchema2 = new EntitySchema<Test2>({\n describe('GH issue 725', () => {\n \n test('mapping values from returning statement to custom types', async () => {\n- const orm = await MikroORM.init<AbstractSqlDriver>({\n+ const orm = await MikroORM.init({\n entities: [TestSchema],\n- dbName: `mikro_orm_test_gh_725`,\n+ dbName: 'mikro_orm_test_gh_725',\n driver: PostgreSqlDriver,\n });\n await orm.schema.ensureDatabase();\n- await orm.schema.execute('CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"');\n await orm.schema.dropSchema();\n await orm.schema.createSchema();\n \n@@ -142,7 +140,7 @@ describe('GH issue 725', () => {\n });\n \n test('validation when trying to persist not discovered entity', async () => {\n- const orm = await MikroORM.init<AbstractSqlDriver>({\n+ const orm = await MikroORM.init({\n entities: [TestSchema2],\n dbName: `:memory:`,\n driver: SqliteDriver,\n", "GH4242.test.ts": "@@ -47,7 +47,6 @@ beforeAll(async () => {\n });\n \n await orm.schema.ensureDatabase();\n- await orm.schema.execute('CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"');\n await orm.schema.refreshDatabase();\n });\n \n", "GH1003.test.ts": "@@ -2,7 +2,7 @@ import { BaseEntity, Collection, MikroORM, Entity, ManyToOne, OneToMany, Primary\n import type { Ref } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Parent extends BaseEntity {\n+class Parent extends BaseEntity {\n \n @PrimaryKey()\n id!: string;\n@@ -13,7 +13,7 @@ export class Parent extends BaseEntity {\n }\n \n @Entity()\n-export class Child extends BaseEntity {\n+class Child extends BaseEntity {\n \n @PrimaryKey()\n id!: string;\n", "GH1009.test.ts": "@@ -1,7 +1,7 @@\n import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Property } from '@mikro-orm/sqlite';\n \n @Entity({ tableName: 'brands' })\n-export class Brand {\n+class Brand {\n \n @PrimaryKey()\n id!: number;\n@@ -12,7 +12,7 @@ export class Brand {\n }\n \n @Entity({ tableName: 'brand_site_restrictions' })\n-export class BrandSiteRestriction {\n+class BrandSiteRestriction {\n \n @PrimaryKey()\n id!: number;\n@@ -26,7 +26,7 @@ export class BrandSiteRestriction {\n }\n \n @Entity({ tableName: 'placements' })\n-export class Placement {\n+class Placement {\n \n @PrimaryKey()\n id!: number;\n@@ -40,7 +40,7 @@ export class Placement {\n }\n \n @Entity({ tableName: 'publishers' })\n-export class Publisher {\n+class Publisher {\n \n @OneToMany({ entity: () => Site, mappedBy: 'publisher' })\n sites = new Collection<Site>(this);\n@@ -51,7 +51,7 @@ export class Publisher {\n }\n \n @Entity({ tableName: 'sites' })\n-export class Site {\n+class Site {\n \n @ManyToOne({ entity: () => Publisher, nullable: true })\n publisher?: Publisher;\n", "GH1041.test.ts": "@@ 
-2,7 +2,7 @@ import { Collection, Entity, LoadStrategy, ManyToMany, MikroORM, PopulateHint, P\n import { mockLogger } from '../helpers';\n \n @Entity()\n-export class App {\n+class App {\n \n @PrimaryKey()\n id!: number;\n@@ -16,7 +16,7 @@ export class App {\n }\n \n @Entity()\n-export class User {\n+class User {\n \n @PrimaryKey()\n id!: number;\n", "GH1115.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, ManyToOne, MikroORM, PrimaryKey, Property } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class B {\n+class B {\n \n @PrimaryKey()\n id!: number;\n@@ -12,7 +12,7 @@ export class B {\n }\n \n @Entity()\n-export class A {\n+class A {\n \n @PrimaryKey()\n id!: number;\n", "GH1171.test.ts": "@@ -2,7 +2,7 @@ import { Entity, MikroORM, OneToOne, PrimaryKey, Property } from '@mikro-orm/sql\n import { v4 } from 'uuid';\n \n @Entity()\n-export class B {\n+class B {\n \n @PrimaryKey()\n id: string = v4();\n@@ -13,7 +13,7 @@ export class B {\n }\n \n @Entity()\n-export class A {\n+class A {\n \n @PrimaryKey()\n id!: string;\n", "GH1395.test.ts": "@@ -6,7 +6,7 @@ export interface EmailMessageTest {\n }\n \n @Entity()\n-export class TestTemplate {\n+class TestTemplate {\n \n @PrimaryKey()\n _id!: ObjectId;\n", "GH1616.test.ts": "@@ -1,7 +1,7 @@\n import { Embeddable, Embedded, Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/sqlite';\n \n @Embeddable()\n-export class D {\n+class D {\n \n @Property({ type: 'boolean', nullable: true })\n test?: boolean = false;\n@@ -9,7 +9,7 @@ export class D {\n }\n \n @Embeddable()\n-export class C {\n+class C {\n \n @Embedded(() => D, { object: true, nullable: false })\n d!: D;\n@@ -17,7 +17,7 @@ export class C {\n }\n \n @Embeddable()\n-export class B {\n+class B {\n \n @Embedded(() => C, { object: true, nullable: false })\n c!: C;\n@@ -28,7 +28,7 @@ export class B {\n }\n \n @Entity()\n-export class A {\n+class A {\n \n @PrimaryKey()\n id!: number;\n", "GH1626.test.ts": "@@ -6,7 +6,7 @@ import {\n Property,\n } from '@mikro-orm/sqlite';\n import { mockLogger } from '../helpers';\n-export class NativeBigIntType extends BigIntType {\n+class NativeBigIntType extends BigIntType {\n \n override convertToJSValue(value: any): any {\n if (!value) {\n@@ -19,7 +19,7 @@ export class NativeBigIntType extends BigIntType {\n }\n \n @Entity()\n-export class Author {\n+class Author {\n \n @PrimaryKey({ type: NativeBigIntType, comment: 'PK' })\n id!: bigint;\n", "GH1704.test.ts": "@@ -2,7 +2,7 @@ import { Entity, PrimaryKey, Property, OneToOne, MikroORM } from '@mikro-orm/sql\n import { mockLogger } from '../helpers';\n \n @Entity()\n-export class Profile {\n+class Profile {\n \n @PrimaryKey()\n id!: number;\n", "GH1721.test.ts": "@@ -2,7 +2,7 @@ import { Entity, MikroORM, PrimaryKey, Property, Type } from '@mikro-orm/sqlite'\n import { Guid } from 'guid-typescript';\n import { mockLogger } from '../helpers';\n \n-export class GuidType extends Type<Guid | undefined, string | undefined> {\n+class GuidType extends Type<Guid | undefined, string | undefined> {\n \n override convertToDatabaseValue(value: Guid | undefined): string | undefined {\n if (!value) {\n@@ -27,7 +27,7 @@ export class GuidType extends Type<Guid | undefined, string | undefined> {\n }\n \n @Entity()\n-export class Couch {\n+class Couch {\n \n @PrimaryKey({ type: GuidType })\n id!: Guid;\n", "GH1902.test.ts": "@@ -14,7 +14,7 @@ import {\n } from '@mikro-orm/sqlite';\n \n @Entity({ tableName: 'users' })\n-export class UserEntity {\n+class UserEntity {\n \n @PrimaryKey({ type: 'number' })\n id!: number;\n@@ 
-32,7 +32,7 @@ export class UserEntity {\n }\n \n @Entity({ tableName: 'tenants' })\n-export class TenantEntity {\n+class TenantEntity {\n \n [OptionalProps]?: 'isEnabled';\n \n", "GH1910.test.ts": "@@ -2,7 +2,7 @@ import type { EntityManager } from '@mikro-orm/postgresql';\n import { Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/postgresql';\n \n @Entity()\n-export class A {\n+class A {\n \n @PrimaryKey({ type: 'number' })\n id!: number;\n", "GH1927.test.ts": "@@ -2,7 +2,7 @@ import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Propert\n import { mockLogger } from '../helpers';\n \n @Entity()\n-export class Author {\n+class Author {\n \n @PrimaryKey()\n id!: number;\n@@ -20,7 +20,7 @@ export class Author {\n }\n \n @Entity()\n-export class Book {\n+class Book {\n \n @PrimaryKey()\n id!: number;\n", "GH2273.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, LoadStrategy, MikroORM, OneToOne, PrimaryKey, Property } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Checkout {\n+class Checkout {\n \n @PrimaryKey()\n id!: number;\n@@ -14,7 +14,7 @@ export class Checkout {\n }\n \n @Entity()\n-export class Discount {\n+class Discount {\n \n @PrimaryKey()\n id!: number;\n@@ -35,7 +35,7 @@ export class Discount {\n }\n \n @Entity()\n-export class Checkout2 {\n+class Checkout2 {\n \n @PrimaryKey()\n id!: number;\n@@ -49,7 +49,7 @@ export class Checkout2 {\n }\n \n @Entity()\n-export class Discount2 {\n+class Discount2 {\n \n @PrimaryKey()\n id!: number;\n", "GH228.test.ts": "@@ -2,7 +2,7 @@ import { Entity, ManyToOne, MikroORM, PrimaryKey, Property } from '@mikro-orm/sq\n import { mockLogger } from '../helpers';\n \n @Entity()\n-export class B {\n+class B {\n \n @PrimaryKey({ type: 'number' })\n id!: number;\n", "GH2379.test.ts": "@@ -2,7 +2,7 @@ import { Collection, Entity, Ref, ManyToOne, MikroORM, OneToMany, OptionalProps,\n import { performance } from 'perf_hooks';\n \n @Entity()\n-export class VendorBuyerRelationship {\n+class VendorBuyerRelationship {\n \n [OptionalProps]?: 'created';\n \n@@ -24,7 +24,7 @@ export class VendorBuyerRelationship {\n }\n \n @Entity()\n-export class Member {\n+class Member {\n \n [OptionalProps]?: 'created';\n \n@@ -49,7 +49,7 @@ export class Member {\n }\n \n @Entity()\n-export class Job {\n+class Job {\n \n [OptionalProps]?: 'rejected';\n \n", "GH2395.test.ts": "@@ -1,7 +1,7 @@\n import { Cascade, Collection, Entity, Ref, ManyToOne, MikroORM, OneToMany, PrimaryKey } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Parent {\n+class Parent {\n \n @PrimaryKey()\n id!: number;\n@@ -18,7 +18,7 @@ export class Parent {\n }\n \n @Entity()\n-export class Child {\n+class Child {\n \n @PrimaryKey()\n id!: number;\n@@ -29,7 +29,7 @@ export class Child {\n }\n \n @Entity()\n-export class Child2 {\n+class Child2 {\n \n @PrimaryKey()\n id!: number;\n", "GH2406.test.ts": "@@ -1,7 +1,7 @@\n import { Collection, Entity, Ref, ManyToOne, MikroORM, OneToMany, PrimaryKey } from '@mikro-orm/sqlite';\n \n @Entity({ forceConstructor: true })\n-export class Parent {\n+class Parent {\n \n @PrimaryKey()\n id!: number;\n@@ -12,7 +12,7 @@ export class Parent {\n }\n \n @Entity({ forceConstructor: true })\n-export class Child {\n+class Child {\n \n @PrimaryKey()\n id!: number;\n", "GH2583.test.ts": "@@ -7,7 +7,7 @@ export enum WithEnumArrayValue {\n }\n \n @Entity()\n-export class WithEnumArray {\n+class WithEnumArray {\n \n @PrimaryKey()\n id!: number;\n", "GH2675.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, LoadStrategy, ManyToOne, MikroORM, 
PrimaryKey, wrap } from '@mikro-orm/postgresql';\n \n @Entity()\n-export class A {\n+class A {\n \n @PrimaryKey()\n id!: number;\n@@ -9,7 +9,7 @@ export class A {\n }\n \n @Entity()\n-export class B {\n+class B {\n \n @PrimaryKey()\n id!: number;\n", "GH2774.test.ts": "@@ -1,7 +1,7 @@\n import { Embeddable, Embedded, Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/sqlite';\n \n @Embeddable()\n-export class Nested {\n+class Nested {\n \n @Property({ nullable: true })\n value: string | null = null;\n@@ -9,7 +9,7 @@ export class Nested {\n }\n \n @Embeddable()\n-export class Name {\n+class Name {\n \n @Property({ nullable: true })\n value: string | null = null;\n@@ -20,7 +20,7 @@ export class Name {\n }\n \n @Entity()\n-export class User {\n+class User {\n \n @PrimaryKey()\n id!: number;\n", "GH2781.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, ManyToOne, MikroORM, PrimaryKey, Property } from '@mikro-orm/postgresql';\n \n @Entity()\n-export class Address {\n+class Address {\n \n @PrimaryKey()\n id!: number;\n@@ -22,7 +22,7 @@ export class Address {\n }\n \n @Entity()\n-export class Customer {\n+class Customer {\n \n @PrimaryKey()\n id!: number;\n", "GH2784.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/postgresql';\n \n @Entity()\n-export class Address {\n+class Address {\n \n @PrimaryKey()\n id!: number;\n", "GH2815.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, MikroORM, OneToOne, PrimaryKey } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Position {\n+class Position {\n \n @PrimaryKey()\n id!: number;\n@@ -12,7 +12,7 @@ export class Position {\n }\n \n @Entity()\n-export class Leg {\n+class Leg {\n \n @PrimaryKey()\n id!: number;\n@@ -23,7 +23,7 @@ export class Leg {\n }\n \n @Entity()\n-export class Position2 {\n+class Position2 {\n \n @PrimaryKey()\n id!: number;\n@@ -34,7 +34,7 @@ export class Position2 {\n }\n \n @Entity()\n-export class Leg2 {\n+class Leg2 {\n \n @PrimaryKey()\n id!: number;\n", "GH2821.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, MikroORM, OneToOne, PrimaryKey } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Position {\n+class Position {\n \n @PrimaryKey()\n id!: number;\n@@ -15,7 +15,7 @@ export class Position {\n }\n \n @Entity()\n-export class Leg {\n+class Leg {\n \n @PrimaryKey()\n id!: number;\n", "GH2882.test.ts": "@@ -1,7 +1,7 @@\n import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, wrap } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Parent {\n+class Parent {\n \n @PrimaryKey()\n id!: number;\n", "GH2974.test.ts": "@@ -1,7 +1,7 @@\n import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Property, wrap } from '@mikro-orm/better-sqlite';\n \n @Entity()\n-export class SomeMany {\n+class SomeMany {\n \n @PrimaryKey()\n id!: number;\n@@ -15,7 +15,7 @@ export class SomeMany {\n }\n \n @Entity()\n-export class Test {\n+class Test {\n \n @PrimaryKey()\n id!: number;\n", "GH302.test.ts": "@@ -1,7 +1,7 @@\n import { Entity, Ref, MikroORM, PrimaryKey, Property, Reference, ManyToOne, OneToMany, Collection } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class A {\n+class A {\n \n @PrimaryKey({ type: 'number' })\n id: number;\n@@ -20,7 +20,7 @@ export class A {\n }\n \n @Entity()\n-export class B {\n+class B {\n \n @PrimaryKey({ type: 'number' })\n id!: number;\n", "GH3026.test.ts": "@@ -2,7 +2,7 @@ import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey, Propert\n import { mockLogger } from '../helpers';\n \n @Entity()\n-export 
class Ingredient {\n+class Ingredient {\n \n @PrimaryKey()\n id!: number;\n@@ -16,7 +16,7 @@ export class Ingredient {\n }\n \n @Entity()\n-export class Recipe {\n+class Recipe {\n \n @PrimaryKey()\n id!: number;\n@@ -30,7 +30,7 @@ export class Recipe {\n }\n \n @Entity()\n-export class RecipeIngredient {\n+class RecipeIngredient {\n \n @PrimaryKey()\n id!: number;\n", "GH3240.test.ts": "@@ -3,7 +3,7 @@ import { Collection, Entity, ManyToMany, MikroORM, PrimaryKey, Property } from '\n type SquadType = 'GROUND' | 'AIR';\n \n @Entity()\n-export class Soldier {\n+class Soldier {\n \n @PrimaryKey()\n id!: number;\n@@ -20,7 +20,7 @@ export class Soldier {\n }\n \n @Entity()\n-export class Squad {\n+class Squad {\n \n @PrimaryKey()\n id!: number;\n", "GH3287.test.ts": "@@ -1,7 +1,7 @@\n import { Collection, Entity, LoadStrategy, ManyToMany, MikroORM, PrimaryKey } from '@mikro-orm/better-sqlite';\n \n @Entity()\n-export class Group {\n+class Group {\n \n @PrimaryKey()\n id!: number;\n@@ -15,7 +15,7 @@ export class Group {\n }\n \n @Entity()\n-export class Participant {\n+class Participant {\n \n @PrimaryKey()\n id!: number;\n", "GH3490.test.ts": "@@ -1,7 +1,7 @@\n import { Collection, Entity, ManyToOne, MikroORM, OneToMany, PrimaryKey } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Contract {\n+class Contract {\n \n @PrimaryKey()\n id!: number;\n@@ -12,7 +12,7 @@ export class Contract {\n }\n \n @Entity()\n-export class Customer {\n+class Customer {\n \n @PrimaryKey()\n id!: number;\n", "GH3548.test.ts": "@@ -1,7 +1,7 @@\n import { MikroORM, ObjectId, Entity, PrimaryKey, Property, OneToOne } from '@mikro-orm/mongodb';\n \n @Entity()\n-export class Author {\n+class Author {\n \n @PrimaryKey()\n _id!: ObjectId;\n@@ -15,7 +15,7 @@ export class Author {\n }\n \n @Entity()\n-export class AuthorDetail {\n+class AuthorDetail {\n \n @PrimaryKey()\n _id!: ObjectId;\n", "GH3696.test.ts": "@@ -2,7 +2,7 @@ import { FullTextType, MikroORM, Collection, Entity, Index, ManyToMany, PrimaryK\n \n @Entity()\n @Unique({ properties: ['name'] })\n-export class Artist {\n+class Artist {\n \n @PrimaryKey()\n id!: number;\n@@ -23,7 +23,7 @@ export class Artist {\n }\n \n @Entity()\n-export class Song {\n+class Song {\n \n @PrimaryKey()\n id!: number;\n", "GH3738.test.ts": "@@ -12,7 +12,7 @@ import {\n import { randomUUID } from 'crypto';\n \n @Entity()\n-export class Question {\n+class Question {\n \n [OptionalProps]?: 'createdAt';\n \n@@ -31,7 +31,7 @@ export class Question {\n }\n \n @Entity()\n-export class Answer {\n+class Answer {\n \n [OptionalProps]?: 'createdAt' | 'question';\n \n", "GH3844.test.ts": "@@ -2,7 +2,7 @@ import { Entity, PrimaryKey, Property, OneToOne, Ref, ref } from '@mikro-orm/cor\n import { MikroORM } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class GamePoolEntity {\n+class GamePoolEntity {\n \n @PrimaryKey()\n contract_address!: string;\n@@ -36,7 +36,7 @@ export class GamePoolEntity {\n }\n \n @Entity()\n-export class GamePoolScannerEntity {\n+class GamePoolScannerEntity {\n \n @OneToOne(() => GamePoolEntity, e => e.scanner, {\n primary: true,\n", "GH4295.test.ts": "@@ -14,7 +14,7 @@ class RunScheduleEntity {\n }\n \n @Entity()\n-export class AEntity {\n+class AEntity {\n \n @PrimaryKey()\n id!: number;\n", "GH4343.test.ts": "@@ -3,7 +3,7 @@ import { Entity, ManyToOne, PrimaryKey, Property, ref, Ref } from '@mikro-orm/co\n import { v4 } from 'uuid';\n \n @Entity()\n-export class LocalizedString {\n+class LocalizedString {\n \n @PrimaryKey({ type: 'uuid' })\n id = v4();\n@@ -21,7 
+21,7 @@ export class LocalizedString {\n }\n \n @Entity()\n-export class Book {\n+class Book {\n \n @PrimaryKey({ type: 'uuid' })\n id = v4();\n", "GH4533.test.ts": "@@ -15,7 +15,7 @@ import { SqliteDriver } from '@mikro-orm/sqlite';\n import { mockLogger } from '../helpers';\n \n @Entity({ tableName: 'core_users' })\n-export class User {\n+class User {\n \n @PrimaryKey()\n id!: number;\n@@ -33,7 +33,7 @@ export class User {\n }\n \n @Entity({ tableName: 'core_roles' })\n-export class Role {\n+class Role {\n \n @PrimaryKey()\n id!: number;\n", "GH4973.test.ts": "@@ -2,7 +2,7 @@ import { Collection, Entity, OneToMany, MikroORM, PrimaryKey, Property, ManyToOn\n import { mockLogger } from '../helpers';\n \n @Entity()\n-export class User {\n+class User {\n \n @PrimaryKey()\n id!: number;\n@@ -13,7 +13,7 @@ export class User {\n }\n \n @Entity()\n-export class Book {\n+class Book {\n \n @PrimaryKey()\n id!: number;\n", "GH557.test.ts": "@@ -1,7 +1,7 @@\n import { MikroORM, Entity, ManyToOne, OneToOne, PrimaryKey, Property } from '@mikro-orm/sqlite';\n \n @Entity()\n-export class Rate {\n+class Rate {\n \n @PrimaryKey()\n id!: number;\n@@ -22,7 +22,7 @@ export class Rate {\n }\n \n @Entity()\n-export class Application {\n+class Application {\n \n @PrimaryKey()\n id!: number;\n", "GH572.test.ts": "@@ -2,7 +2,7 @@ import { Entity, Ref, MikroORM, OneToOne, PrimaryKey, Property, QueryOrder } fro\n import { mockLogger } from '../helpers';\n \n @Entity()\n-export class A {\n+class A {\n \n @PrimaryKey()\n id!: number;\n@@ -13,7 +13,7 @@ export class A {\n }\n \n @Entity()\n-export class B {\n+class B {\n \n @PrimaryKey()\n id!: number;\n", "GH755.test.ts": "@@ -1,6 +1,6 @@\n import { EntitySchema, MikroORM } from '@mikro-orm/sqlite';\n \n-export class Test {\n+class Test {\n \n id!: string;\n createdAt!: Date;\n", "GH811.test.ts": "@@ -2,7 +2,7 @@ import { Entity, helper, MikroORM, OneToOne, PrimaryKey, Property } from '@mikro\n import { v4 } from 'uuid';\n \n @Entity()\n-export class Address {\n+class Address {\n \n @PrimaryKey({ type: 'uuid' })\n id = v4();\n@@ -13,7 +13,7 @@ export class Address {\n }\n \n @Entity()\n-export class Contact {\n+class Contact {\n \n @PrimaryKey({ type: 'uuid' })\n id = v4();\n@@ -27,7 +27,7 @@ export class Contact {\n }\n \n @Entity()\n-export class Employee {\n+class Employee {\n \n @PrimaryKey({ type: 'uuid' })\n id = v4();\n", "sqlite-constraints.test.ts": "@@ -2,9 +2,8 @@ import { Entity, type EntityManager, ManyToOne, MikroORM, PrimaryKey, Property,\n import { SqliteDriver } from '@mikro-orm/sqlite';\n import { BetterSqliteDriver } from '@mikro-orm/better-sqlite';\n \n-\n @Entity()\n-export class Author {\n+class Author {\n \n @PrimaryKey({ type: 'string' })\n id!: string;\n@@ -14,9 +13,8 @@ export class Author {\n \n }\n \n-\n @Entity()\n-export class Book {\n+class Book {\n \n @PrimaryKey({ type: 'string' })\n id!: string;\n@@ -29,7 +27,6 @@ export class Book {\n \n }\n \n-\n async function createEntities(em: EntityManager) {\n const author = new Author();\n author.id = '1';\n@@ -44,7 +41,6 @@ async function createEntities(em: EntityManager) {\n return author;\n }\n \n-\n describe('sqlite driver', () => {\n \n let orm: MikroORM<SqliteDriver>;\n@@ -70,7 +66,6 @@ describe('sqlite driver', () => {\n });\n });\n \n-\n describe('better-sqlite driver', () => {\n \n let orm: MikroORM<BetterSqliteDriver>;\n@@ -95,4 +90,3 @@ describe('better-sqlite driver', () => {\n }\n });\n });\n-\n"}
chore: impl some error conversions
ed0f8e1d57380fe5b76248bf8dd88973898718c4
chore
https://github.com/erg-lang/erg/commit/ed0f8e1d57380fe5b76248bf8dd88973898718c4
impl some error conversions
{"mod.rs": "@@ -186,6 +186,12 @@ impl From<ParserRunnerError> for CompileError {\n }\n }\n \n+impl From<CompileError> for ParserRunnerErrors {\n+ fn from(err: CompileError) -> Self {\n+ Self::new(vec![err.into()])\n+ }\n+}\n+\n impl From<CompileError> for ParserRunnerError {\n fn from(err: CompileError) -> Self {\n Self {\n", "error.rs": "@@ -609,6 +609,12 @@ impl ParserRunnerError {\n }\n }\n \n+impl From<ParserRunnerError> for LexError {\n+ fn from(err: ParserRunnerError) -> Self {\n+ Self::new(err.core)\n+ }\n+}\n+\n #[derive(Debug)]\n pub struct ParserRunnerErrors(Vec<ParserRunnerError>);\n \n@@ -618,6 +624,12 @@ impl_stream!(ParserRunnerErrors, ParserRunnerError);\n \n impl MultiErrorDisplay<ParserRunnerError> for ParserRunnerErrors {}\n \n+impl From<ParserRunnerErrors> for LexErrors {\n+ fn from(errs: ParserRunnerErrors) -> Self {\n+ Self(errs.0.into_iter().map(LexError::from).collect())\n+ }\n+}\n+\n impl fmt::Display for ParserRunnerErrors {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n self.fmt_all(f)\n"}
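The diff above adds `From` conversions between erg's error wrapper types: a single error is forwarded into the target type, and a collection is converted by mapping the element-level conversion over its vector. A minimal sketch of that pattern, using hypothetical `AError`/`BError` types rather than erg's actual definitions:

```rust
// Illustrative sketch only: AError/BError and BErrors are hypothetical stand-ins
// for the error types wired together in the diff above.
#[derive(Debug)]
struct AError(String);

#[derive(Debug)]
struct BError(String);

#[derive(Debug)]
struct BErrors(Vec<BError>);

// Single-error conversion: reuse the inner payload.
impl From<AError> for BError {
    fn from(err: AError) -> Self {
        BError(err.0)
    }
}

// Collection-level conversion: map the element conversion over the vector,
// mirroring the ParserRunnerErrors -> LexErrors impl in the diff.
impl From<Vec<AError>> for BErrors {
    fn from(errs: Vec<AError>) -> Self {
        BErrors(errs.into_iter().map(BError::from).collect())
    }
}

fn main() {
    let errs: BErrors = vec![AError("unexpected token".into())].into();
    println!("{errs:?}");
}
```

The collection-level impl just reuses the element-level one, which is what the diff does via `map(LexError::from)`.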
chore(deps): relock
94959b143e68b92360441c7383e1930ff986e5e5
chore
https://github.com/rohankumardubey/ibis/commit/94959b143e68b92360441c7383e1930ff986e5e5
relock
{"poetry.lock": "@@ -4855,83 +4855,6 @@ dev = [\"coverage[toml] (==5.0.4)\", \"cryptography (>=3.4.0)\", \"pre-commit\", \"pyte\n docs = [\"sphinx (>=4.5.0,<5.0.0)\", \"sphinx-rtd-theme\", \"zope.interface\"]\n tests = [\"coverage[toml] (==5.0.4)\", \"pytest (>=6.0.0,<7.0.0)\"]\n \n-[[package]]\n-name = \"pymssql\"\n-version = \"2.2.11\"\n-description = \"DB-API interface to Microsoft SQL Server for Python. (new Cython-based version)\"\n-optional = true\n-python-versions = \"*\"\n-files = [\n- {file = \"pymssql-2.2.11-cp310-cp310-macosx_11_0_x86_64.whl\", hash = \"sha256:692ab328ac290bd2031bc4dd6deae32665dfffda1b12aaa92928d3ebc667d5ad\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl\", hash = \"sha256:723a4612421027a01b51e42e786678a18c4a27613a3ccecf331c026e0cc41353\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl\", hash = \"sha256:34ab2373ca607174ad7244cfe955c07b6bc77a1e21d3c3143dbe934dec82c3a4\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:1bc0ba19b4426c57509f065a03748d9ac230f1543ecdac57175e6ebd213a7bc0\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:e8d9d42a50f6e8e6b356e4e8b2fa1da725344ec0be6f8a6107b7196e5bd74906\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:aec64022a2419fad9f496f8e310522635e39d092970e1d55375ea0be86725174\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-manylinux_2_28_x86_64.whl\", hash = \"sha256:c389c8041c94d4058827faf5735df5f8e4c1c1eebdd051859536dc393925a667\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-win32.whl\", hash = \"sha256:6452326cecd4dcee359a6f8878b827118a8c8523cd24de5b3a971a7a172e4275\"},\n- {file = \"pymssql-2.2.11-cp310-cp310-win_amd64.whl\", hash = \"sha256:c1bde266dbc91b100abd0311102a6585df09cc963599421cc12fd6b4cfa8e3d3\"},\n- {file = \"pymssql-2.2.11-cp311-cp311-macosx_10_9_universal2.whl\", hash = \"sha256:6ddaf0597138179517bdbf5b5aa3caffee65987316dc906359a5d0801d0847ee\"},\n- {file = \"pymssql-2.2.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:0c26af25991715431559cb5b37f243b8ff676540f504ed0317774dfc71827af1\"},\n- {file = \"pymssql-2.2.11-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:410e8c40b7c1b421e750cf80ccf2da8d802ed815575758ac9a78c5f6cd995723\"},\n- {file = \"pymssql-2.2.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:fa1767239ed45e1fa91d82fc0c63305750530787cd64089cabbe183eb538a35b\"},\n- {file = \"pymssql-2.2.11-cp311-cp311-manylinux_2_28_x86_64.whl\", hash = \"sha256:9a644e4158fed30ae9f3846f2f1c74d36fa1610eb552de35b7f611d063fa3c85\"},\n- {file = \"pymssql-2.2.11-cp311-cp311-win32.whl\", hash = \"sha256:1956c111debe67f69a9c839b33ce420f0e8def1ef5ff9831c03d8ac840f82376\"},\n- {file = \"pymssql-2.2.11-cp311-cp311-win_amd64.whl\", hash = \"sha256:0bdd1fb49b0e331e47e83f39d4af784c857e230bfc73519654bab29285c51c63\"},\n- {file = \"pymssql-2.2.11-cp312-cp312-macosx_10_9_universal2.whl\", hash = \"sha256:2609bbd3b715822bb4fa6d457b2985d32ad6ab9580fdb61ae6e0eee251791d24\"},\n- {file = \"pymssql-2.2.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:c382aea9adaaee189f352d7a493e3f76c13f9337ec2b6aa40e76b114fa13ebac\"},\n- {file = \"pymssql-2.2.11-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = 
\"sha256:5928324a09de7466368c15ece1de4ab5ea968d24943ceade758836f9fc7149f5\"},\n- {file = \"pymssql-2.2.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:ee8b10f797d0bfec626b803891cf9e98480ee11f2e8459a7616cdb7e4e4bf2de\"},\n- {file = \"pymssql-2.2.11-cp312-cp312-manylinux_2_28_x86_64.whl\", hash = \"sha256:1d5aa1a090b17f4ba75ffac3bb371f6c8c869692b653689396f9b470fde06981\"},\n- {file = \"pymssql-2.2.11-cp312-cp312-win32.whl\", hash = \"sha256:1f7ba71cf81af65c005173f279928bf86700d295f97e4965e169b5764bc6c4f2\"},\n- {file = \"pymssql-2.2.11-cp312-cp312-win_amd64.whl\", hash = \"sha256:a0ebb0e40c93f8f1e40aad80f512ae4aa89cb1ec8a96964b9afedcff1d5813fd\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-macosx_10_14_x86_64.whl\", hash = \"sha256:e0ed115902956efaca9d9a20fa9b2b604e3e11d640416ca74900d215cdcbf3ab\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl\", hash = \"sha256:1a75afa17746972bb61120fb6ea907657fc1ab68250bbbd8b21a00d0720ed0f4\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl\", hash = \"sha256:d2ae69d8e46637a203cfb48e05439fc9e2ff7646fa1f5396aa3577ce52810031\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:f13710240457ace5b8c9cca7f4971504656f5703b702895a86386e87c7103801\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:d7234b0f61dd9ccb2304171b5fd7ed9db133b4ea7c835c9942c9dc5bfc00c1cb\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:0dcd76a8cc757c7cfe2d235f232a20d74ac8cebf9feabcdcbda5ef33157d14b1\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_28_x86_64.whl\", hash = \"sha256:84aff3235ad1289c4079c548cfcdf7eaaf2475b9f81557351deb42e8f45a9c2d\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:5b081aa7b02911e3f299f7d1f68ce8ca585a5119d44601bf4483da0aae8c2181\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl\", hash = \"sha256:d315f08c106c884d6b42f70c9518e765a5bc23f6d3a587346bc4e6f198768c7a\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-win32.whl\", hash = \"sha256:c8b35b3d5e326729e5edb73d593103d2dbfb474bd36ee95b4e85e1f8271ba98a\"},\n- {file = \"pymssql-2.2.11-cp36-cp36m-win_amd64.whl\", hash = \"sha256:139c5032e0a2765764987803f1266132fcc5da572848ccc4d29cebba794a4260\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-macosx_11_0_x86_64.whl\", hash = \"sha256:7bac28aed1d625a002e0289e0c18d1808cecbdc12e2a1a3927dbbaff66e5fff3\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl\", hash = \"sha256:4eeaacc1dbbc678f4e80c6fd6fc279468021fdf2e486adc8631ec0de6b6c0e62\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl\", hash = \"sha256:428e32e53c554798bc2d0682a169fcb681df6b68544c4aedd1186018ea7e0447\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:2b621c5e32136dabc2fea25696beab0647ec336d25c04ab6d8eb8c8ee92f0e52\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:658c85474ea01ca3a30de769df06f46681e882524b05c6994cd6fd985c485f27\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:070181361ab94bdaeb14b591a35d853f327bc90c660b04047d474274fbb80357\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_28_x86_64.whl\", 
hash = \"sha256:492e49616b58b2d6caf4a2598cb344572870171a7b65ba1ac61a5e248b6a8e1c\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:803122aec31fbd52f5d65ef3b30b3bd2dc7b2a9e3a8223d16078a25805155c45\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl\", hash = \"sha256:09075e129655ab1178d2d60efb9b3fbf5cdb6da2338ecdb3a92c53a4ad7efa0c\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-win32.whl\", hash = \"sha256:b4a8377527702d746c490c2ce67d17f1c351d182b49b82fae6e67ae206bf9663\"},\n- {file = \"pymssql-2.2.11-cp37-cp37m-win_amd64.whl\", hash = \"sha256:167313d91606dc7a3c05b2ad60491a138b7408a8779599ab6430a48a67f133f0\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-macosx_11_0_x86_64.whl\", hash = \"sha256:8d418f4dca245421242ed9df59d3bcda0cd081650df6deb1bef7f157b6a6f9dd\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl\", hash = \"sha256:f0c44169df8d23c7ce172bd90ef5deb44caf19f15990e4db266e3193071988a4\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl\", hash = \"sha256:b78032e45ea33c55d430b93e55370b900479ea324fae5d5d32486cc0fdc0fedd\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:984d99ee6a2579f86c536b1b0354ad3dc9701e98a4b3953f1301b4695477cd2f\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:287c8f79a7eca0c6787405797bac0f7c502d9be151f3f823aae12042235f8426\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:85ea4ea296afcae34bc61e4e0ef2f503270fd4bb097b308a07a9194f1f063aa1\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_28_x86_64.whl\", hash = \"sha256:a114633fa02b7eb5bc63520bf07954106c0ed0ce032449c871abb8b8c435a872\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:7332db36a537cbc16640a0c3473a2e419aa5bc1f9953cada3212e7b2587de658\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl\", hash = \"sha256:cd7292d872948c1f67c8cc12158f2c8ed9873d54368139ce1f67b2262ac34029\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-win32.whl\", hash = \"sha256:fbca115e11685b5891755cc22b3db4348071b8d100a41e1ce93526d9c3dbf2d5\"},\n- {file = \"pymssql-2.2.11-cp38-cp38-win_amd64.whl\", hash = \"sha256:452b88a4ceca7efb934b5babb365851a3c52e723642092ebc92777397c2cacdb\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-macosx_11_0_x86_64.whl\", hash = \"sha256:001242cedc73587cbb10aec4069de50febbff3c4c50f9908a215476496b3beab\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl\", hash = \"sha256:da492482b923b9cc9ad37f0f5592c776279299db2a89c0b7fc931aaefec652d4\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl\", hash = \"sha256:139a833e6e72a624e4f2cde803a34a616d5661dd9a5b2ae0402d9d8a597b2f1f\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:e57fbfad252434d64bdf4b6a935e4241616a4cf8df7af58b9772cd91fce9309a\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:a5308507c2c4e94ede7e5b164870c1ba2be55abab6daf795b5529e2da4e838b6\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:bdca43c42d5f370358535b2107140ed550d74f9ef0fc95d2d7fa8c4e40ee48c2\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_28_x86_64.whl\", hash = 
\"sha256:fe0cc975aac87b364fdb55cb89642435c3e859dcd99d7260f48af94111ba2673\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:4551f50c8a3b6ffbd71f794ee1c0c0134134c5d6414302c2fa28b67fe4470d07\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl\", hash = \"sha256:ae9818df40588d5a49e7476f05e31cc83dea630d607178d66762ca8cf32e9f77\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-win32.whl\", hash = \"sha256:15257c7bd89c0283f70d6eaafd9b872201818572b8ba1e8576408ae23ef50c7c\"},\n- {file = \"pymssql-2.2.11-cp39-cp39-win_amd64.whl\", hash = \"sha256:65bb674c0ba35379bf93d1b2cf06fdc5e7ec56e1d0e9de525bdcf977190b2865\"},\n- {file = \"pymssql-2.2.11.tar.gz\", hash = \"sha256:15815bf1ff9edb475ec4ef567f23e23c4e828ce119ff5bf98a072b66b8d0ac1b\"},\n-]\n-\n [[package]]\n name = \"pymysql\"\n version = \"1.1.0\"\n@@ -7416,7 +7339,7 @@ cffi = {version = \">=1.11\", markers = \"platform_python_implementation == \\\"PyPy\\\n cffi = [\"cffi (>=1.11)\"]\n \n [extras]\n-all = [\"black\", \"clickhouse-connect\", \"dask\", \"datafusion\", \"db-dtypes\", \"deltalake\", \"duckdb\", \"duckdb-engine\", \"fsspec\", \"geoalchemy2\", \"geopandas\", \"google-cloud-bigquery\", \"google-cloud-bigquery-storage\", \"graphviz\", \"impyla\", \"oracledb\", \"packaging\", \"polars\", \"psycopg2\", \"pydata-google-auth\", \"pydruid\", \"pymssql\", \"pymysql\", \"pyspark\", \"regex\", \"requests\", \"shapely\", \"snowflake-connector-python\", \"snowflake-sqlalchemy\", \"sqlalchemy\", \"sqlalchemy-exasol\", \"sqlalchemy-views\", \"trino\"]\n+all = [\"black\", \"clickhouse-connect\", \"dask\", \"datafusion\", \"db-dtypes\", \"deltalake\", \"duckdb\", \"duckdb-engine\", \"geoalchemy2\", \"geopandas\", \"google-cloud-bigquery\", \"google-cloud-bigquery-storage\", \"graphviz\", \"impyla\", \"oracledb\", \"packaging\", \"polars\", \"psycopg2\", \"pydata-google-auth\", \"pydruid\", \"pymysql\", \"pyodbc\", \"pyspark\", \"regex\", \"shapely\", \"snowflake-connector-python\", \"snowflake-sqlalchemy\", \"sqlalchemy\", \"sqlalchemy-exasol\", \"sqlalchemy-views\", \"trino\"]\n bigquery = [\"db-dtypes\", \"google-cloud-bigquery\", \"google-cloud-bigquery-storage\", \"pydata-google-auth\"]\n clickhouse = [\"clickhouse-connect\", \"sqlalchemy\"]\n dask = [\"dask\", \"regex\"]\n@@ -7428,8 +7351,8 @@ duckdb = [\"duckdb\", \"duckdb-engine\", \"sqlalchemy\", \"sqlalchemy-views\"]\n exasol = [\"sqlalchemy\", \"sqlalchemy-exasol\", \"sqlalchemy-views\"]\n flink = []\n geospatial = [\"geoalchemy2\", \"geopandas\", \"shapely\"]\n-impala = [\"fsspec\", \"impyla\", \"requests\", \"sqlalchemy\"]\n-mssql = [\"pymssql\", \"sqlalchemy\", \"sqlalchemy-views\"]\n+impala = [\"impyla\", \"sqlalchemy\"]\n+mssql = [\"pyodbc\", \"sqlalchemy\", \"sqlalchemy-views\"]\n mysql = [\"pymysql\", \"sqlalchemy\", \"sqlalchemy-views\"]\n oracle = [\"oracledb\", \"packaging\", \"sqlalchemy\", \"sqlalchemy-views\"]\n pandas = [\"regex\"]\n@@ -7444,4 +7367,4 @@ visualization = [\"graphviz\"]\n [metadata]\n lock-version = \"2.0\"\n python-versions = \"^3.9\"\n-content-hash = \"e33849b55adc9ca33aa5b98b94dbeca72c6cef7e7150890fe3c3adba206b3892\"\n+content-hash = \"7cdedb3e9657196bfe4485e8cdb35998c826cca681595b38f39e1ba253c2886c\"\n"}
refactor: make EntityProperty interface generic (use keyof T on name)
8c9ee4d0c15200b4f2a2a93abc6885caf3e6f419
refactor
https://github.com/mikro-orm/mikro-orm/commit/8c9ee4d0c15200b4f2a2a93abc6885caf3e6f419
make EntityProperty interface generic (use keyof T on name)
{"Entity.ts": "@@ -53,10 +53,10 @@ export type EntityName<T extends IEntityType<T>> = string | EntityClass<T>;\n \n export type EntityData<T extends IEntityType<T>> = { [P in keyof T]?: T[P] | IPrimaryKey; } & Record<string, any>;\n \n-export interface EntityProperty {\n- name: string;\n+export interface EntityProperty<T extends IEntityType<T> = any> {\n+ name: string & keyof T;\n fk: string;\n- entity: () => EntityName<IEntity>;\n+ entity: () => EntityName<T>;\n type: string;\n primary: boolean;\n length?: any;\n@@ -84,7 +84,7 @@ export interface EntityMetadata<T extends IEntityType<T> = any> {\n path: string;\n primaryKey: keyof T & string;\n serializedPrimaryKey: keyof T & string;\n- properties: { [K in keyof T & string]: EntityProperty };\n+ properties: { [K in keyof T & string]: EntityProperty<T> };\n customRepository: () => { new (em: EntityManager, entityName: EntityName<T>): EntityRepository<T> };\n hooks: Record<string, string[]>;\n prototype: EntityClass<T> & IEntity;\n"}
fix: infinite recursion bugs
c31e93052b3d3390d53340590e78b24e786a4efb
fix
https://github.com/erg-lang/erg/commit/c31e93052b3d3390d53340590e78b24e786a4efb
infinite recursion bugs
{"eval.rs": "@@ -1337,6 +1337,8 @@ impl Context {\n }\n TyParam::FreeVar(fv) if fv.is_linked() => self.convert_tp_into_type(fv.crack().clone()),\n TyParam::Type(t) => Ok(t.as_ref().clone()),\n+ TyParam::Mono(name) => Ok(Type::Mono(name)),\n+ // TyParam::Erased(_t) => Ok(Type::Obj),\n TyParam::Value(v) => self.convert_value_into_type(v).map_err(TyParam::Value),\n // TODO: Dict, Set\n other => Err(other),\n@@ -1672,7 +1674,7 @@ impl Context {\n line!() as usize,\n ().loc(),\n self.caused_by(),\n- &tp.qual_name().unwrap_or(\"_\".into()),\n+ &tp.to_string(),\n )\n })?;\n if qt.is_generalized() {\n", "inquire.rs": "@@ -4,14 +4,15 @@ use std::path::{Path, PathBuf};\n \n use erg_common::config::Input;\n use erg_common::consts::{ERG_MODE, PYTHON_MODE};\n-use erg_common::dict;\n use erg_common::error::{ErrorCore, Location, SubMessage};\n use erg_common::levenshtein;\n use erg_common::set::Set;\n use erg_common::traits::{Locational, NoTypeDisplay, Stream};\n use erg_common::triple::Triple;\n use erg_common::Str;\n-use erg_common::{fmt_option, fmt_slice, log, option_enum_unwrap, set, switch_lang};\n+use erg_common::{\n+ dict, fmt_option, fmt_slice, get_hash, log, option_enum_unwrap, set, switch_lang,\n+};\n \n use erg_parser::ast::{self, Identifier, VarName};\n use erg_parser::token::Token;\n@@ -1024,20 +1025,23 @@ impl Context {\n let coerced = self\n .coerce(obj.t(), &())\n .map_err(|mut errs| errs.remove(0))?;\n- if &coerced == obj.ref_t() {\n- Err(TyCheckError::no_attr_error(\n- self.cfg.input.clone(),\n- line!() as usize,\n- attr_name.loc(),\n- namespace.name.to_string(),\n- obj.ref_t(),\n- attr_name.inspect(),\n- self.get_similar_attr(obj.ref_t(), attr_name.inspect()),\n- ))\n- } else {\n+ if &coerced != obj.ref_t() {\n+ let hash = get_hash(obj.ref_t());\n obj.ref_t().coerce();\n- self.search_method_info(obj, attr_name, pos_args, kw_args, input, namespace)\n+ if get_hash(obj.ref_t()) != hash {\n+ return self\n+ .search_method_info(obj, attr_name, pos_args, kw_args, input, namespace);\n+ }\n }\n+ Err(TyCheckError::no_attr_error(\n+ self.cfg.input.clone(),\n+ line!() as usize,\n+ attr_name.loc(),\n+ namespace.name.to_string(),\n+ obj.ref_t(),\n+ attr_name.inspect(),\n+ self.get_similar_attr(obj.ref_t(), attr_name.inspect()),\n+ ))\n }\n \n fn validate_visibility(\n@@ -1263,12 +1267,13 @@ impl Context {\n return Err(self.not_callable_error(obj, attr_name, instance, None));\n }\n if sub != Never {\n+ let hash = get_hash(instance);\n instance.coerce();\n if instance.is_quantified_subr() {\n let instance = self.instantiate(instance.clone(), obj)?;\n self.substitute_call(obj, attr_name, &instance, pos_args, kw_args)?;\n return Ok(SubstituteResult::Coerced(instance));\n- } else {\n+ } else if get_hash(instance) != hash {\n return self\n .substitute_call(obj, attr_name, instance, pos_args, kw_args);\n }\n"}
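The `inquire.rs` hunks above break the recursion by hashing the type before coercing it and only retrying the lookup when the hash actually changed. A minimal sketch of that guard pattern, with a hypothetical `Term` type and `coerce` step standing in for erg's types:

```rust
// Illustrative sketch only: Term, coerce and normalize are hypothetical; the point
// is the guard from the diff above -- recurse again only if the coercion step
// actually changed the value, detected here via its hash.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Hash, Debug)]
enum Term {
    Var(String),
    Lit(i64),
}

fn get_hash<T: Hash>(t: &T) -> u64 {
    let mut h = DefaultHasher::new();
    t.hash(&mut h);
    h.finish()
}

// A toy coercion step: resolve a free variable to a literal (makes progress once).
fn coerce(t: &mut Term) {
    if matches!(t, Term::Var(_)) {
        *t = Term::Lit(42);
    }
}

fn normalize(t: &mut Term) {
    let before = get_hash(t);
    coerce(t);
    // Recurse only when the term actually changed; otherwise stop,
    // which prevents the unbounded recursion the commit fixes.
    if get_hash(t) != before {
        normalize(t);
    }
}

fn main() {
    let mut t = Term::Var("x".into());
    normalize(&mut t);
    println!("{t:?}");
}
```

If the coercion makes no progress, the hash is unchanged and the function returns instead of recursing forever.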
fix: Parse refs from bytes, not from String. The latter can cause issues around ill-formed UTF-8 which wouldn't bother git either. This comes at the expense of not parsing line by line anymore, but instead reading as fast as possible and parsing afterwards. Performance-wise I think it doesn't matter, but it will cause more memory to be used. If this ever becomes a problem, for example during pushes where we are stuck with V1, we can consider implementing our own streaming approach that works with packet lines instead - they are just not exposed here even though they could be.
806b8c2ef392137f3a6ebd0f28da2a3a07a9f3eb
fix
https://github.com/Byron/gitoxide/commit/806b8c2ef392137f3a6ebd0f28da2a3a07a9f3eb
Parse refs from bytes, not from String. The latter can cause issues around ill-formed UTF-8 which wouldn't bother git either. This comes at the expense of not parsing line by line anymore, but instead reading as fast as possible and parsing afterwards. Performance-wise I think it doesn't matter, but it will cause more memory to be used. If this ever becomes a problem, for example during pushes where we are stuck with V1, we can consider implementing our own streaming approach that works with packet lines instead - they are just not exposed here even though they could be.
{"Cargo.toml": "@@ -46,7 +46,7 @@ git-credentials = { version = \"^0.7.0\", path = \"../git-credentials\" }\n \n thiserror = \"1.0.32\"\n serde = { version = \"1.0.114\", optional = true, default-features = false, features = [\"derive\"]}\n-bstr = { version = \"1.0.1\", default-features = false, features = [\"std\"] }\n+bstr = { version = \"1.0.1\", default-features = false, features = [\"std\", \"unicode\"] }\n nom = { version = \"7\", default-features = false, features = [\"std\"]}\n btoi = \"0.4.2\"\n \n", "tests.rs": "@@ -15,6 +15,8 @@ unborn refs/heads/symbolic symref-target:refs/heads/target\n \"\n .as_bytes();\n \n+ #[cfg(feature = \"blocking-client\")]\n+ let input = &mut Fixture(input);\n let out = refs::from_v2_refs(input).await.expect(\"no failure on valid input\");\n \n assert_eq!(\n@@ -56,6 +58,7 @@ unborn refs/heads/symbolic symref-target:refs/heads/target\n \n #[maybe_async::test(feature = \"blocking-client\", async(feature = \"async-client\", async_std::test))]\n async fn extract_references_from_v1_refs() {\n+ #[cfg_attr(feature = \"blocking-client\", allow(unused_mut))]\n let input = &mut \"73a6868963993a3328e7d8fe94e5a6ac5078a944 HEAD\n 21c9b7500cb144b3169a6537961ec2b9e865be81 MISSING_NAMESPACE_TARGET\n 73a6868963993a3328e7d8fe94e5a6ac5078a944 refs/heads/main\n@@ -63,6 +66,8 @@ async fn extract_references_from_v1_refs() {\n dce0ea858eef7ff61ad345cc5cdac62203fb3c10 refs/tags/git-commitgraph-v0.0.0\n 21c9b7500cb144b3169a6537961ec2b9e865be81 refs/tags/git-commitgraph-v0.0.0^{}\"\n .as_bytes();\n+ #[cfg(feature = \"blocking-client\")]\n+ let input = &mut Fixture(input);\n let out = refs::from_v1_refs_received_as_part_of_handshake_and_capabilities(\n input,\n Capabilities::from_bytes(b\"\\0symref=HEAD:refs/heads/main symref=MISSING_NAMESPACE_TARGET:(null)\")\n@@ -106,7 +111,7 @@ fn extract_symbolic_references_from_capabilities() -> Result<(), client::Error>\n let caps = client::Capabilities::from_bytes(\n b\"\\0unrelated symref=HEAD:refs/heads/main symref=ANOTHER:refs/heads/foo symref=MISSING_NAMESPACE_TARGET:(null) agent=git/2.28.0\",\n )?\n- .0;\n+ .0;\n let out = refs::shared::from_capabilities(caps.iter()).expect(\"a working example\");\n \n assert_eq!(\n@@ -128,3 +133,38 @@ fn extract_symbolic_references_from_capabilities() -> Result<(), client::Error>\n );\n Ok(())\n }\n+\n+#[cfg(feature = \"blocking-client\")]\n+struct Fixture<'a>(&'a [u8]);\n+\n+#[cfg(feature = \"blocking-client\")]\n+impl<'a> std::io::Read for Fixture<'a> {\n+ fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {\n+ self.0.read(buf)\n+ }\n+}\n+\n+#[cfg(feature = \"blocking-client\")]\n+impl<'a> std::io::BufRead for Fixture<'a> {\n+ fn fill_buf(&mut self) -> std::io::Result<&[u8]> {\n+ self.0.fill_buf()\n+ }\n+\n+ fn consume(&mut self, amt: usize) {\n+ self.0.consume(amt)\n+ }\n+}\n+\n+#[cfg(feature = \"blocking-client\")]\n+impl<'a> git_transport::client::ReadlineBufRead for Fixture<'a> {\n+ fn readline(\n+ &mut self,\n+ ) -> Option<std::io::Result<Result<git_packetline::PacketLineRef<'_>, git_packetline::decode::Error>>> {\n+ use bstr::{BStr, ByteSlice};\n+ let bytes: &BStr = self.0.into();\n+ let mut lines = bytes.lines();\n+ let res = lines.next()?;\n+ self.0 = lines.as_bytes();\n+ Some(Ok(Ok(git_packetline::PacketLineRef::Data(res))))\n+ }\n+}\n", "arguments.rs": "@@ -1,333 +0,0 @@\n-use bstr::ByteSlice;\n-use git_transport::Protocol;\n-\n-use crate::fetch;\n-\n-fn arguments_v1(features: impl IntoIterator<Item = &'static str>) -> fetch::Arguments {\n- fetch::Arguments::new(Protocol::V1, 
features.into_iter().map(|n| (n, None)).collect())\n-}\n-\n-fn arguments_v2(features: impl IntoIterator<Item = &'static str>) -> fetch::Arguments {\n- fetch::Arguments::new(Protocol::V2, features.into_iter().map(|n| (n, None)).collect())\n-}\n-\n-struct Transport<T> {\n- inner: T,\n- stateful: bool,\n-}\n-\n-#[cfg(feature = \"blocking-client\")]\n-mod impls {\n- use std::borrow::Cow;\n-\n- use bstr::BStr;\n- use git_transport::{\n- client,\n- client::{Error, MessageKind, RequestWriter, SetServiceResponse, WriteMode},\n- Protocol, Service,\n- };\n-\n- use crate::fetch::tests::arguments::Transport;\n-\n- impl<T: client::TransportWithoutIO> client::TransportWithoutIO for Transport<T> {\n- fn set_identity(&mut self, identity: client::Account) -> Result<(), Error> {\n- self.inner.set_identity(identity)\n- }\n-\n- fn request(&mut self, write_mode: WriteMode, on_into_read: MessageKind) -> Result<RequestWriter<'_>, Error> {\n- self.inner.request(write_mode, on_into_read)\n- }\n-\n- fn to_url(&self) -> Cow<'_, BStr> {\n- self.inner.to_url()\n- }\n-\n- fn supported_protocol_versions(&self) -> &[Protocol] {\n- self.inner.supported_protocol_versions()\n- }\n-\n- fn connection_persists_across_multiple_requests(&self) -> bool {\n- self.stateful\n- }\n-\n- fn configure(\n- &mut self,\n- config: &dyn std::any::Any,\n- ) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {\n- self.inner.configure(config)\n- }\n- }\n-\n- impl<T: client::Transport> client::Transport for Transport<T> {\n- fn handshake<'a>(\n- &mut self,\n- service: Service,\n- extra_parameters: &'a [(&'a str, Option<&'a str>)],\n- ) -> Result<SetServiceResponse<'_>, Error> {\n- self.inner.handshake(service, extra_parameters)\n- }\n- }\n-}\n-\n-#[cfg(feature = \"async-client\")]\n-mod impls {\n- use std::borrow::Cow;\n-\n- use async_trait::async_trait;\n- use bstr::BStr;\n- use git_transport::{\n- client,\n- client::{Error, MessageKind, RequestWriter, SetServiceResponse, WriteMode},\n- Protocol, Service,\n- };\n-\n- use crate::fetch::tests::arguments::Transport;\n- impl<T: client::TransportWithoutIO + Send> client::TransportWithoutIO for Transport<T> {\n- fn set_identity(&mut self, identity: client::Account) -> Result<(), Error> {\n- self.inner.set_identity(identity)\n- }\n-\n- fn request(&mut self, write_mode: WriteMode, on_into_read: MessageKind) -> Result<RequestWriter<'_>, Error> {\n- self.inner.request(write_mode, on_into_read)\n- }\n-\n- fn to_url(&self) -> Cow<'_, BStr> {\n- self.inner.to_url()\n- }\n-\n- fn supported_protocol_versions(&self) -> &[Protocol] {\n- self.inner.supported_protocol_versions()\n- }\n-\n- fn connection_persists_across_multiple_requests(&self) -> bool {\n- self.stateful\n- }\n-\n- fn configure(\n- &mut self,\n- config: &dyn std::any::Any,\n- ) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {\n- self.inner.configure(config)\n- }\n- }\n-\n- #[async_trait(?Send)]\n- impl<T: client::Transport + Send> client::Transport for Transport<T> {\n- async fn handshake<'a>(\n- &mut self,\n- service: Service,\n- extra_parameters: &'a [(&'a str, Option<&'a str>)],\n- ) -> Result<SetServiceResponse<'_>, Error> {\n- self.inner.handshake(service, extra_parameters).await\n- }\n- }\n-}\n-\n-fn transport(\n- out: &mut Vec<u8>,\n- stateful: bool,\n-) -> Transport<git_transport::client::git::Connection<&'static [u8], &mut Vec<u8>>> {\n- Transport {\n- inner: git_transport::client::git::Connection::new(\n- &[],\n- out,\n- Protocol::V1, // does not matter\n- b\"does/not/matter\".as_bstr().to_owned(),\n- 
None::<(&str, _)>,\n- git_transport::client::git::ConnectMode::Process, // avoid header to be sent\n- ),\n- stateful,\n- }\n-}\n-\n-fn id(hex: &str) -> git_hash::ObjectId {\n- git_hash::ObjectId::from_hex(hex.as_bytes()).expect(\"expect valid hex id\")\n-}\n-\n-mod v1 {\n- use bstr::ByteSlice;\n-\n- use crate::fetch::tests::arguments::{arguments_v1, id, transport};\n-\n- #[maybe_async::test(feature = \"blocking-client\", async(feature = \"async-client\", async_std::test))]\n- async fn haves_and_wants_for_clone() {\n- let mut out = Vec::new();\n- let mut t = transport(&mut out, true);\n- let mut arguments = arguments_v1([\"feature-a\", \"feature-b\"].iter().cloned());\n-\n- arguments.want(id(\"7b333369de1221f9bfbbe03a3a13e9a09bc1c907\"));\n- arguments.want(id(\"ff333369de1221f9bfbbe03a3a13e9a09bc1ffff\"));\n- arguments.send(&mut t, true).await.expect(\"sending to buffer to work\");\n- assert_eq!(\n- out.as_bstr(),\n- b\"0046want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a feature-b\n-0032want ff333369de1221f9bfbbe03a3a13e9a09bc1ffff\n-00000009done\n-\"\n- .as_bstr()\n- );\n- }\n-\n- #[maybe_async::test(feature = \"blocking-client\", async(feature = \"async-client\", async_std::test))]\n- async fn haves_and_wants_for_fetch_stateless() {\n- let mut out = Vec::new();\n- let mut t = transport(&mut out, false);\n- let mut arguments = arguments_v1([\"feature-a\", \"shallow\", \"deepen-since\", \"deepen-not\"].iter().copied());\n-\n- arguments.deepen(1);\n- arguments.shallow(id(\"7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff\"));\n- arguments.want(id(\"7b333369de1221f9bfbbe03a3a13e9a09bc1c907\"));\n- arguments.deepen_since(12345);\n- arguments.deepen_not(\"refs/heads/main\".into());\n- arguments.have(id(\"0000000000000000000000000000000000000000\"));\n- arguments.send(&mut t, false).await.expect(\"sending to buffer to work\");\n-\n- arguments.have(id(\"1111111111111111111111111111111111111111\"));\n- arguments.send(&mut t, true).await.expect(\"sending to buffer to work\");\n- assert_eq!(\n- out.as_bstr(),\n- b\"005cwant 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a shallow deepen-since deepen-not\n-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff\n-000ddeepen 1\n-0017deepen-since 12345\n-001fdeepen-not refs/heads/main\n-00000032have 0000000000000000000000000000000000000000\n-0000005cwant 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a shallow deepen-since deepen-not\n-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff\n-000ddeepen 1\n-0017deepen-since 12345\n-001fdeepen-not refs/heads/main\n-00000032have 1111111111111111111111111111111111111111\n-0009done\n-\"\n- .as_bstr()\n- );\n- }\n-\n- #[maybe_async::test(feature = \"blocking-client\", async(feature = \"async-client\", async_std::test))]\n- async fn haves_and_wants_for_fetch_stateful() {\n- let mut out = Vec::new();\n- let mut t = transport(&mut out, true);\n- let mut arguments = arguments_v1([\"feature-a\", \"shallow\"].iter().copied());\n-\n- arguments.deepen(1);\n- arguments.want(id(\"7b333369de1221f9bfbbe03a3a13e9a09bc1c907\"));\n- arguments.have(id(\"0000000000000000000000000000000000000000\"));\n- arguments.send(&mut t, false).await.expect(\"sending to buffer to work\");\n-\n- arguments.have(id(\"1111111111111111111111111111111111111111\"));\n- arguments.send(&mut t, true).await.expect(\"sending to buffer to work\");\n- assert_eq!(\n- out.as_bstr(),\n- b\"0044want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907 feature-a shallow\n-000ddeepen 1\n-00000032have 0000000000000000000000000000000000000000\n-00000032have 
1111111111111111111111111111111111111111\n-0009done\n-\"\n- .as_bstr()\n- );\n- }\n-}\n-\n-mod v2 {\n- use bstr::ByteSlice;\n-\n- use crate::fetch::tests::arguments::{arguments_v2, id, transport};\n-\n- #[maybe_async::test(feature = \"blocking-client\", async(feature = \"async-client\", async_std::test))]\n- async fn haves_and_wants_for_clone_stateful() {\n- let mut out = Vec::new();\n- let mut t = transport(&mut out, true);\n- let mut arguments = arguments_v2([\"feature-a\", \"shallow\"].iter().copied());\n-\n- arguments.deepen(1);\n- arguments.deepen_relative();\n- arguments.want(id(\"7b333369de1221f9bfbbe03a3a13e9a09bc1c907\"));\n- arguments.want(id(\"ff333369de1221f9bfbbe03a3a13e9a09bc1ffff\"));\n- arguments.send(&mut t, true).await.expect(\"sending to buffer to work\");\n- assert_eq!(\n- out.as_bstr(),\n- b\"0012command=fetch\n-0001000ethin-pack\n-0010include-tag\n-000eofs-delta\n-000ddeepen 1\n-0014deepen-relative\n-0032want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907\n-0032want ff333369de1221f9bfbbe03a3a13e9a09bc1ffff\n-0009done\n-0000\"\n- .as_bstr(),\n- \"we filter features/capabilities without value as these apparently shouldn't be listed (remote dies otherwise)\"\n- );\n- }\n-\n- #[maybe_async::test(feature = \"blocking-client\", async(feature = \"async-client\", async_std::test))]\n- async fn haves_and_wants_for_fetch_stateless_and_stateful() {\n- for is_stateful in &[false, true] {\n- let mut out = Vec::new();\n- let mut t = transport(&mut out, *is_stateful);\n- let mut arguments = arguments_v2(Some(\"shallow\"));\n-\n- arguments.deepen(1);\n- arguments.deepen_since(12345);\n- arguments.shallow(id(\"7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff\"));\n- arguments.want(id(\"7b333369de1221f9bfbbe03a3a13e9a09bc1c907\"));\n- arguments.deepen_not(\"refs/heads/main\".into());\n- arguments.have(id(\"0000000000000000000000000000000000000000\"));\n- arguments.send(&mut t, false).await.expect(\"sending to buffer to work\");\n-\n- arguments.have(id(\"1111111111111111111111111111111111111111\"));\n- arguments.send(&mut t, true).await.expect(\"sending to buffer to work\");\n- assert_eq!(\n- out.as_bstr(),\n- b\"0012command=fetch\n-0001000ethin-pack\n-0010include-tag\n-000eofs-delta\n-000ddeepen 1\n-0017deepen-since 12345\n-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff\n-0032want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907\n-001fdeepen-not refs/heads/main\n-0032have 0000000000000000000000000000000000000000\n-00000012command=fetch\n-0001000ethin-pack\n-0010include-tag\n-000eofs-delta\n-000ddeepen 1\n-0017deepen-since 12345\n-0035shallow 7b333369de1221f9bfbbe03a3a13e9a09bc1c9ff\n-0032want 7b333369de1221f9bfbbe03a3a13e9a09bc1c907\n-001fdeepen-not refs/heads/main\n-0032have 1111111111111111111111111111111111111111\n-0009done\n-0000\"\n- .as_bstr(),\n- \"V2 is stateless by default, so it repeats all but 'haves' in each request\"\n- );\n- }\n- }\n-\n- #[maybe_async::test(feature = \"blocking-client\", async(feature = \"async-client\", async_std::test))]\n- async fn ref_in_want() {\n- let mut out = Vec::new();\n- let mut t = transport(&mut out, false);\n- let mut arguments = arguments_v2([\"ref-in-want\"].iter().copied());\n-\n- arguments.want_ref(b\"refs/heads/main\".as_bstr());\n- arguments.send(&mut t, true).await.expect(\"sending to buffer to work\");\n- assert_eq!(\n- out.as_bstr(),\n- b\"0012command=fetch\n-0001000ethin-pack\n-0010include-tag\n-000eofs-delta\n-001dwant-ref refs/heads/main\n-0009done\n-0000\"\n- .as_bstr()\n- )\n- }\n-}\n", "mod.rs": "@@ -13,17 +13,19 @@ pub mod parse {\n 
#[error(transparent)]\n Io(#[from] std::io::Error),\n #[error(transparent)]\n+ DecodePacketline(#[from] git_transport::packetline::decode::Error),\n+ #[error(transparent)]\n Id(#[from] git_hash::decode::Error),\n #[error(\"{symref:?} could not be parsed. A symref is expected to look like <NAME>:<target>.\")]\n MalformedSymref { symref: BString },\n #[error(\"{0:?} could not be parsed. A V1 ref line should be '<hex-hash> <path>'.\")]\n- MalformedV1RefLine(String),\n+ MalformedV1RefLine(BString),\n #[error(\n \"{0:?} could not be parsed. A V2 ref line should be '<hex-hash> <path>[ (peeled|symref-target):<value>'.\"\n )]\n- MalformedV2RefLine(String),\n+ MalformedV2RefLine(BString),\n #[error(\"The ref attribute {attribute:?} is unknown. Found in line {line:?}\")]\n- UnkownAttribute { attribute: String, line: String },\n+ UnkownAttribute { attribute: BString, line: BString },\n #[error(\"{message}\")]\n InvariantViolation { message: &'static str },\n }\n@@ -65,3 +67,6 @@ pub use async_io::{from_v1_refs_received_as_part_of_handshake_and_capabilities,\n mod blocking_io;\n #[cfg(feature = \"blocking-client\")]\n pub use blocking_io::{from_v1_refs_received_as_part_of_handshake_and_capabilities, from_v2_refs};\n+\n+#[cfg(test)]\n+mod tests;\n", "async_io.rs": "@@ -1,19 +1,17 @@\n use futures_io::AsyncBufRead;\n-use futures_lite::AsyncBufReadExt;\n+use futures_lite::AsyncReadExt;\n \n use crate::handshake::{refs, refs::parse::Error, Ref};\n+use bstr::ByteSlice;\n \n /// Parse refs from the given input line by line. Protocol V2 is required for this to succeed.\n pub async fn from_v2_refs(in_refs: &mut (dyn AsyncBufRead + Unpin)) -> Result<Vec<Ref>, Error> {\n let mut out_refs = Vec::new();\n- let mut line = String::new();\n- loop {\n- line.clear();\n- let bytes_read = in_refs.read_line(&mut line).await?;\n- if bytes_read == 0 {\n- break;\n- }\n- out_refs.push(refs::shared::parse_v2(&line)?);\n+ let mut buf = Vec::new();\n+\n+ in_refs.read_to_end(&mut buf).await?;\n+ for line in ByteSlice::lines(buf.as_slice()) {\n+ out_refs.push(refs::shared::parse_v2(line.into())?);\n }\n Ok(out_refs)\n }\n@@ -32,14 +30,11 @@ pub async fn from_v1_refs_received_as_part_of_handshake_and_capabilities<'a>(\n ) -> Result<Vec<Ref>, refs::parse::Error> {\n let mut out_refs = refs::shared::from_capabilities(capabilities)?;\n let number_of_possible_symbolic_refs_for_lookup = out_refs.len();\n- let mut line = String::new();\n- loop {\n- line.clear();\n- let bytes_read = in_refs.read_line(&mut line).await?;\n- if bytes_read == 0 {\n- break;\n- }\n- refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, &line)?;\n+\n+ let mut buf = Vec::new();\n+ in_refs.read_to_end(&mut buf).await?;\n+ for line in buf.as_slice().lines() {\n+ refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, line.into())?;\n }\n Ok(out_refs.into_iter().map(Into::into).collect())\n }\n", "blocking_io.rs": "@@ -1,18 +1,10 @@\n-use std::io;\n-\n use crate::handshake::{refs, refs::parse::Error, Ref};\n \n /// Parse refs from the given input line by line. 
Protocol V2 is required for this to succeed.\n-pub fn from_v2_refs(in_refs: &mut dyn io::BufRead) -> Result<Vec<Ref>, Error> {\n+pub fn from_v2_refs(in_refs: &mut dyn git_transport::client::ReadlineBufRead) -> Result<Vec<Ref>, Error> {\n let mut out_refs = Vec::new();\n- let mut line = String::new();\n- loop {\n- line.clear();\n- let bytes_read = in_refs.read_line(&mut line)?;\n- if bytes_read == 0 {\n- break;\n- }\n- out_refs.push(refs::shared::parse_v2(&line)?);\n+ while let Some(line) = in_refs.readline().transpose()?.transpose()?.and_then(|l| l.as_bstr()) {\n+ out_refs.push(refs::shared::parse_v2(line)?);\n }\n Ok(out_refs)\n }\n@@ -26,19 +18,14 @@ pub fn from_v2_refs(in_refs: &mut dyn io::BufRead) -> Result<Vec<Ref>, Error> {\n /// Symbolic refs are shoe-horned into server capabilities whereas refs (without symbolic ones) are sent automatically as\n /// part of the handshake. Both symbolic and peeled refs need to be combined to fit into the [`Ref`] type provided here.\n pub fn from_v1_refs_received_as_part_of_handshake_and_capabilities<'a>(\n- in_refs: &mut dyn io::BufRead,\n+ in_refs: &mut dyn git_transport::client::ReadlineBufRead,\n capabilities: impl Iterator<Item = git_transport::client::capabilities::Capability<'a>>,\n ) -> Result<Vec<Ref>, Error> {\n let mut out_refs = refs::shared::from_capabilities(capabilities)?;\n let number_of_possible_symbolic_refs_for_lookup = out_refs.len();\n- let mut line = String::new();\n- loop {\n- line.clear();\n- let bytes_read = in_refs.read_line(&mut line)?;\n- if bytes_read == 0 {\n- break;\n- }\n- refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, &line)?;\n+\n+ while let Some(line) = in_refs.readline().transpose()?.transpose()?.and_then(|l| l.as_bstr()) {\n+ refs::shared::parse_v1(number_of_possible_symbolic_refs_for_lookup, &mut out_refs, line)?;\n }\n Ok(out_refs.into_iter().map(Into::into).collect())\n }\n", "shared.rs": "@@ -1,4 +1,4 @@\n-use bstr::{BString, ByteSlice};\n+use bstr::{BStr, BString, ByteSlice};\n \n use crate::handshake::{refs::parse::Error, Ref};\n \n@@ -70,7 +70,7 @@ impl InternalRef {\n _ => None,\n }\n }\n- fn lookup_symbol_has_path(&self, predicate_path: &str) -> bool {\n+ fn lookup_symbol_has_path(&self, predicate_path: &BStr) -> bool {\n matches!(self, InternalRef::SymbolicForLookup { path, .. 
} if path == predicate_path)\n }\n }\n@@ -109,19 +109,19 @@ pub(crate) fn from_capabilities<'a>(\n pub(in crate::handshake::refs) fn parse_v1(\n num_initial_out_refs: usize,\n out_refs: &mut Vec<InternalRef>,\n- line: &str,\n+ line: &BStr,\n ) -> Result<(), Error> {\n let trimmed = line.trim_end();\n let (hex_hash, path) = trimmed.split_at(\n trimmed\n- .find(' ')\n- .ok_or_else(|| Error::MalformedV1RefLine(trimmed.to_owned()))?,\n+ .find(b\" \")\n+ .ok_or_else(|| Error::MalformedV1RefLine(trimmed.to_owned().into()))?,\n );\n let path = &path[1..];\n if path.is_empty() {\n- return Err(Error::MalformedV1RefLine(trimmed.to_owned()));\n+ return Err(Error::MalformedV1RefLine(trimmed.to_owned().into()));\n }\n- match path.strip_suffix(\"^{}\") {\n+ match path.strip_suffix(b\"^{}\") {\n Some(stripped) => {\n let (previous_path, tag) =\n out_refs\n@@ -146,7 +146,7 @@ pub(in crate::handshake::refs) fn parse_v1(\n match out_refs\n .iter()\n .take(num_initial_out_refs)\n- .position(|r| r.lookup_symbol_has_path(path))\n+ .position(|r| r.lookup_symbol_has_path(path.into()))\n {\n Some(position) => match out_refs.swap_remove(position) {\n InternalRef::SymbolicForLookup { path: _, target } => out_refs.push(InternalRef::Symbolic {\n@@ -166,36 +166,36 @@ pub(in crate::handshake::refs) fn parse_v1(\n Ok(())\n }\n \n-pub(in crate::handshake::refs) fn parse_v2(line: &str) -> Result<Ref, Error> {\n+pub(in crate::handshake::refs) fn parse_v2(line: &BStr) -> Result<Ref, Error> {\n let trimmed = line.trim_end();\n- let mut tokens = trimmed.splitn(3, ' ');\n+ let mut tokens = trimmed.splitn(3, |b| *b == b' ');\n match (tokens.next(), tokens.next()) {\n (Some(hex_hash), Some(path)) => {\n- let id = if hex_hash == \"unborn\" {\n+ let id = if hex_hash == b\"unborn\" {\n None\n } else {\n Some(git_hash::ObjectId::from_hex(hex_hash.as_bytes())?)\n };\n if path.is_empty() {\n- return Err(Error::MalformedV2RefLine(trimmed.to_owned()));\n+ return Err(Error::MalformedV2RefLine(trimmed.to_owned().into()));\n }\n Ok(if let Some(attribute) = tokens.next() {\n- let mut tokens = attribute.splitn(2, ':');\n+ let mut tokens = attribute.splitn(2, |b| *b == b':');\n match (tokens.next(), tokens.next()) {\n (Some(attribute), Some(value)) => {\n if value.is_empty() {\n- return Err(Error::MalformedV2RefLine(trimmed.to_owned()));\n+ return Err(Error::MalformedV2RefLine(trimmed.to_owned().into()));\n }\n match attribute {\n- \"peeled\" => Ref::Peeled {\n+ b\"peeled\" => Ref::Peeled {\n full_ref_name: path.into(),\n object: git_hash::ObjectId::from_hex(value.as_bytes())?,\n tag: id.ok_or(Error::InvariantViolation {\n message: \"got 'unborn' as tag target\",\n })?,\n },\n- \"symref-target\" => match value {\n- \"(null)\" => Ref::Direct {\n+ b\"symref-target\" => match value {\n+ b\"(null)\" => Ref::Direct {\n full_ref_name: path.into(),\n object: id.ok_or(Error::InvariantViolation {\n message: \"got 'unborn' while (null) was a symref target\",\n@@ -215,13 +215,13 @@ pub(in crate::handshake::refs) fn parse_v2(line: &str) -> Result<Ref, Error> {\n },\n _ => {\n return Err(Error::UnkownAttribute {\n- attribute: attribute.to_owned(),\n- line: trimmed.to_owned(),\n+ attribute: attribute.to_owned().into(),\n+ line: trimmed.to_owned().into(),\n })\n }\n }\n }\n- _ => return Err(Error::MalformedV2RefLine(trimmed.to_owned())),\n+ _ => return Err(Error::MalformedV2RefLine(trimmed.to_owned().into())),\n }\n } else {\n Ref::Direct {\n@@ -232,6 +232,6 @@ pub(in crate::handshake::refs) fn parse_v2(line: &str) -> Result<Ref, Error> {\n }\n })\n }\n- _ 
=> Err(Error::MalformedV2RefLine(trimmed.to_owned())),\n+ _ => Err(Error::MalformedV2RefLine(trimmed.to_owned().into())),\n }\n }\n"}
test: add "describeMethods" scope
761adace7c9680c7e16a0f69096cb3b4f66d7410
test
https://github.com/pmndrs/react-spring/commit/761adace7c9680c7e16a0f69096cb3b4f66d7410
add "describeMethods" scope
{"SpringValue.test.ts": "@@ -15,41 +15,7 @@ describe('SpringValue', () => {\n })\n \n describeProps()\n-\n- describe('\"set\" method', () => {\n- it('stops the active animation', async () => {\n- const spring = new SpringValue(0)\n- const promise = spring.start(1)\n-\n- await advanceUntilValue(spring, 0.5)\n- spring.set(2)\n-\n- expect(spring.idle).toBeTruthy()\n- expect(await promise).toMatchObject({\n- finished: false,\n- value: 2,\n- })\n- })\n-\n- describe('when a new value is passed', () => {\n- it('calls the \"onChange\" prop', () => {\n- const onChange = jest.fn()\n- const spring = new SpringValue(0, { onChange })\n- spring.set(1)\n- expect(onChange).toBeCalledWith(1, spring)\n- })\n- it.todo('wraps the \"onChange\" call with \"batchedUpdates\"')\n- })\n-\n- describe('when the current value is passed', () => {\n- it('skips the \"onChange\" call', () => {\n- const onChange = jest.fn()\n- const spring = new SpringValue(0, { onChange })\n- spring.set(0)\n- expect(onChange).not.toBeCalled()\n- })\n- })\n- })\n+ describeMethods()\n \n describeTarget('another SpringValue', from => {\n const node = new SpringValue(from)\n@@ -128,6 +94,43 @@ function describeConfigProp() {\n })\n }\n \n+function describeMethods() {\n+ describe('\"set\" method', () => {\n+ it('stops the active animation', async () => {\n+ const spring = new SpringValue(0)\n+ const promise = spring.start(1)\n+\n+ await advanceUntilValue(spring, 0.5)\n+ spring.set(2)\n+\n+ expect(spring.idle).toBeTruthy()\n+ expect(await promise).toMatchObject({\n+ finished: false,\n+ value: 2,\n+ })\n+ })\n+\n+ describe('when a new value is passed', () => {\n+ it('calls the \"onChange\" prop', () => {\n+ const onChange = jest.fn()\n+ const spring = new SpringValue(0, { onChange })\n+ spring.set(1)\n+ expect(onChange).toBeCalledWith(1, spring)\n+ })\n+ it.todo('wraps the \"onChange\" call with \"batchedUpdates\"')\n+ })\n+\n+ describe('when the current value is passed', () => {\n+ it('skips the \"onChange\" call', () => {\n+ const onChange = jest.fn()\n+ const spring = new SpringValue(0, { onChange })\n+ spring.set(0)\n+ expect(onChange).not.toBeCalled()\n+ })\n+ })\n+ })\n+}\n+\n /** The minimum requirements for testing a dynamic target */\n type OpaqueTarget = {\n node: FrameValue\n"}
test(benchmarks): add `to_pyarrow` benchmark for duckdb
a80cac77f749a03d04c5f37edc152ce15ea0c43e
test
https://github.com/rohankumardubey/ibis/commit/a80cac77f749a03d04c5f37edc152ce15ea0c43e
add `to_pyarrow` benchmark for duckdb
{"test_benchmarks.py": "@@ -753,3 +753,59 @@ def test_parse_many_duckdb_types(benchmark):\n \n types = [\"VARCHAR\", \"INTEGER\", \"DOUBLE\", \"BIGINT\"] * 1000\n benchmark(parse_many, types)\n+\n+\[email protected](scope=\"session\")\n+def sql() -> str:\n+ return \"\"\"\n+ SELECT t1.id as t1_id, x, t2.id as t2_id, y\n+ FROM t1 INNER JOIN t2\n+ ON t1.id = t2.id\n+ \"\"\"\n+\n+\[email protected](scope=\"session\")\n+def ddb(tmp_path_factory):\n+ duckdb = pytest.importorskip(\"duckdb\")\n+\n+ N = 20_000_000\n+\n+ con = duckdb.connect()\n+\n+ path = str(tmp_path_factory.mktemp(\"duckdb\") / \"data.ddb\")\n+ sql = (\n+ lambda var, table, n=N: f\"\"\"\n+ CREATE TABLE {table} AS\n+ SELECT ROW_NUMBER() OVER () AS id, {var}\n+ FROM (\n+ SELECT {var}\n+ FROM RANGE({n}) _ ({var})\n+ ORDER BY RANDOM()\n+ )\n+ \"\"\"\n+ )\n+\n+ with duckdb.connect(path) as con:\n+ con.execute(sql(\"x\", table=\"t1\"))\n+ con.execute(sql(\"y\", table=\"t2\"))\n+ return path\n+\n+\n+def test_duckdb_to_pyarrow(benchmark, sql, ddb) -> None:\n+ # yes, we're benchmarking duckdb here, not ibis\n+ #\n+ # we do this to get a baseline for comparison\n+ duckdb = pytest.importorskip(\"duckdb\")\n+ con = duckdb.connect(ddb, read_only=True)\n+\n+ benchmark(lambda sql: con.sql(sql).to_arrow_table(), sql)\n+\n+\n+def test_ibis_duckdb_to_pyarrow(benchmark, sql, ddb) -> None:\n+ pytest.importorskip(\"duckdb\")\n+ pytest.importorskip(\"duckdb_engine\")\n+\n+ con = ibis.duckdb.connect(ddb, read_only=True)\n+\n+ expr = con.sql(sql)\n+ benchmark(expr.to_pyarrow)\n"}
ci: reenable doctest builds (#9353)
4769ee5ecb8f10f17d10749a0228b6eb4d094a86
ci
https://github.com/ibis-project/ibis/commit/4769ee5ecb8f10f17d10749a0228b6eb4d094a86
reenable doctest builds (#9353)
{"ibis-main.yml": "@@ -128,8 +128,6 @@ jobs:\n run: poetry run python -c 'import shapely.geometry, duckdb'\n \n test_doctests:\n- # FIXME(kszucs): re-enable this build\n- if: false\n name: Doctests\n runs-on: ubuntu-latest\n steps:\n@@ -147,7 +145,7 @@ jobs:\n uses: actions/setup-python@v5\n id: install_python\n with:\n- python-version: \"3.12\"\n+ python-version: \"3.10\"\n \n - name: install poetry\n run: pip install 'poetry==1.8.3'\n"}
fix: `where -> were` typo fix. (#560)
0eca94d84bd82f2083b41acdb316edce54365f11
fix
https://github.com/Byron/gitoxide/commit/0eca94d84bd82f2083b41acdb316edce54365f11
`where -> were` typo fix. (#560)
{"write.rs": "@@ -327,7 +327,7 @@ impl section::Segment {\n if *conventional_count == 1 { \"was\" } else { \"were\" }\n )?;\n if unique_issues.is_empty() {\n- writeln!(out, \" - 0 issues like '(#ID)' where seen in commit messages\")?;\n+ writeln!(out, \" - 0 issues like '(#ID)' were seen in commit messages\")?;\n } else {\n writeln!(\n out,\n", "CHANGELOG.md": "@@ -13,7 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0\n \n - 1 commit contributed to the release.\n - 0 commits were understood as [conventional](https://www.conventionalcommits.org).\n- - 0 issues like '(#ID)' where seen in commit messages\n+ - 0 issues like '(#ID)' were seen in commit messages\n \n ### Commit Details\n \n"}
chore: update dependencies
2ee33a66feacd52b3fa651a1dfbd32b0412949ab
chore
https://github.com/mikro-orm/mikro-orm/commit/2ee33a66feacd52b3fa651a1dfbd32b0412949ab
update dependencies
{"package.json": "@@ -1,6 +1,6 @@\n {\n \"name\": \"mikro-orm\",\n- \"version\": \"2.0.0-alpha.13\",\n+ \"version\": \"2.0.0-rc\",\n \"description\": \"Simple typescript ORM for node.js based on data-mapper, unit-of-work and identity-map patterns. Supports MongoDB, MySQL and SQLite databases as well as usage with vanilla JS.\",\n \"main\": \"dist/index.js\",\n \"typings\": \"dist/index.d.ts\",\n@@ -76,7 +76,7 @@\n \"fast-deep-equal\": \"^2.0.1\",\n \"globby\": \"^9.1.0\",\n \"node-request-context\": \"^1.0.5\",\n- \"ts-morph\": \"^1.2.0\",\n+ \"ts-morph\": \"^1.3.0\",\n \"typescript\": \"^3.3.3\",\n \"uuid\": \"^3.3.2\"\n },\n@@ -91,15 +91,15 @@\n \"@types/clone\": \"^0.1.30\",\n \"@types/globby\": \"^8.0.0\",\n \"@types/jest\": \"^24.0.9\",\n- \"@types/mongodb\": \"^3.1.19\",\n+ \"@types/mongodb\": \"^3.1.20\",\n \"@types/mysql2\": \"types/mysql2\",\n- \"@types/node\": \"^11.9.6\",\n+ \"@types/node\": \"^11.10.4\",\n \"@types/uuid\": \"^3.4.4\",\n \"codacy-coverage\": \"^3.4.0\",\n \"coveralls\": \"^3.0.3\",\n \"husky\": \"^1.3.1\",\n \"jest\": \"^24.1.0\",\n- \"lint-staged\": \"^8.1.4\",\n+ \"lint-staged\": \"^8.1.5\",\n \"mongodb\": \"^3.1.13\",\n \"mysql2\": \"^1.6.5\",\n \"rimraf\": \"^2.6.3\",\n@@ -107,6 +107,6 @@\n \"sqlite\": \"^3.0.2\",\n \"ts-jest\": \"^24.0.0\",\n \"ts-node\": \"^8.0.2\",\n- \"tslint\": \"^5.13.0\"\n+ \"tslint\": \"^5.13.1\"\n }\n }\n"}
chore: replace `quick-error` with `thiserror` This increases the compile time of the crate alone if there is no proc-macro in the dependency tree, but will ever so slightly improve compile times for `gix` as a whole.
cce96ee1382d3d56d77820a2aba6e2d17b52f91c
chore
https://github.com/Byron/gitoxide/commit/cce96ee1382d3d56d77820a2aba6e2d17b52f91c
replace `quick-error` with `thiserror` This increases the compile time of the crate alone if there is no proc-macro in the dependency tree, but will ever so slightly improve compile times for `gix` as a whole.
{"Cargo.lock": "@@ -1622,9 +1622,9 @@ dependencies = [\n \"once_cell\",\n \"parking_lot 0.12.1\",\n \"prodash\",\n- \"quick-error 2.0.1\",\n \"sha1\",\n \"sha1_smol\",\n+ \"thiserror\",\n \"walkdir\",\n ]\n \n", "Cargo.toml": "@@ -43,7 +43,7 @@ crc32 = [\"crc32fast\"]\n ## and reduced performance is acceptable. **zlib-stock** can be used if dynamic linking of an external zlib library is desired or if cmake is not available.\n ## Note that a competitive Zlib implementation is critical to `gitoxide's` object database performance.\n ## Additional backends are supported, each of which overriding the default Rust backend.\n-zlib = [\"flate2\", \"flate2/rust_backend\", \"quick-error\"]\n+zlib = [\"flate2\", \"flate2/rust_backend\", \"thiserror\"]\n ## Use zlib-ng (libz-ng-sys) with native API (no compat mode) that can co-exist with system libz.\n zlib-ng= [\"flate2/zlib-ng\"]\n ## Use a C-based backend which can compress and decompress significantly faster than the other options.\n@@ -125,7 +125,7 @@ bytes = { version = \"1.0.0\", optional = true }\n \n # zlib module\n flate2 = { version = \"1.0.17\", optional = true, default-features = false }\n-quick-error = { version = \"2.0.0\", optional = true }\n+thiserror = { version = \"1.0.38\", optional = true }\n \n ## If enabled, OnceCell will be made available for interior mutability either in sync or unsync forms.\n once_cell = { version = \"1.13.0\", optional = true }\n", "mod.rs": "@@ -2,24 +2,16 @@ pub use flate2::{Decompress, Status};\n \n /// non-streaming interfaces for decompression\n pub mod inflate {\n- use quick_error::quick_error;\n- quick_error! {\n- /// The error returned by various [Inflate methods][super::Inflate]\n- #[allow(missing_docs)]\n- #[derive(Debug)]\n- pub enum Error {\n- WriteInflated(err: std::io::Error) {\n- display(\"Could not write all bytes when decompressing content\")\n- from()\n- }\n- Inflate(err: flate2::DecompressError) {\n- display(\"Could not decode zip stream, status was '{:?}'\", err)\n- from()\n- }\n- Status(status: flate2::Status) {\n- display(\"The zlib status indicated an error, status was '{:?}'\", status)\n- }\n- }\n+ /// The error returned by various [Inflate methods][super::Inflate]\n+ #[derive(Debug, thiserror::Error)]\n+ #[allow(missing_docs)]\n+ pub enum Error {\n+ #[error(\"Could not write all bytes when decompressing content\")]\n+ WriteInflated(#[from] std::io::Error),\n+ #[error(\"Could not decode zip stream, status was '{0:?}'\")]\n+ Inflate(#[from] flate2::DecompressError),\n+ #[error(\"The zlib status indicated an error, status was '{0:?}'\")]\n+ Status(flate2::Status),\n }\n }\n \n"}
build: updated versions
0b9cb35626036ccc4d41909c1d6c0e43c5f15c60
build
https://github.com/tsparticles/tsparticles/commit/0b9cb35626036ccc4d41909c1d6c0e43c5f15c60
updated versions
{"package.dist.json": "@@ -99,7 +99,7 @@\n \"./package.json\": \"./package.json\"\n },\n \"dependencies\": {\n- \"@tsparticles/engine\": \"^\"\n+ \"@tsparticles/engine\": \"^3.5.0\"\n },\n \"publishConfig\": {\n \"access\": \"public\"\n"}
fix: test failure
3df896e1c699dfcf6f206081c1f8c2b12b8f1a84
fix
https://github.com/erg-lang/erg/commit/3df896e1c699dfcf6f206081c1f8c2b12b8f1a84
test failure
{"test.rs": "@@ -184,7 +184,7 @@ fn test_tolerant_completion() -> Result<(), Box<dyn std::error::Error>> {\n let resp = client.request_completion(uri.raw(), 2, 10, \".\")?;\n if let Some(CompletionResponse::Array(items)) = resp {\n assert!(items.len() >= 10);\n- assert!(items.iter().any(|item| item.label == \"tqdm\"));\n+ assert!(items.iter().any(|item| item.label == \"pi\"));\n Ok(())\n } else {\n Err(format!(\"not items: {resp:?}\").into())\n", "tolerant_completion.er": "@@ -1,6 +1,6 @@\n-tqdm = pyimport \"tqdm\"\n+math = pyimport \"math\"\n \n-f _: tqdm\n+f _: math\n i = 1\n s = \"a\"\n g() = None + i s i\n", "build.rs": "@@ -31,6 +31,12 @@ fn main() -> std::io::Result<()> {\n copy_dir(&erg_path, \"lib\").unwrap_or_else(|_| {\n eprintln!(\"failed to copy the std library to {erg_path}\");\n });\n+ let pkgs_path = path::Path::new(&erg_path).join(\"lib\").join(\"pkgs\");\n+ if !pkgs_path.exists() {\n+ fs::create_dir(&pkgs_path).unwrap_or_else(|_| {\n+ eprintln!(\"failed to create the directory: {}\", pkgs_path.display());\n+ });\n+ }\n Ok(())\n }\n \n"}
build: try fixing publish issues
e14dc55a9fdf5368faaaf8ab2c01012eda8c2a39
build
https://github.com/tsparticles/tsparticles/commit/e14dc55a9fdf5368faaaf8ab2c01012eda8c2a39
try fixing publish issues
{"package.json": "@@ -19,6 +19,9 @@\n \"ini\": \"^2.0.0\",\n \"lerna\": \"^4.0.0\"\n },\n+ \"resolutions\": {\n+ \"npm-packlist\": \"1.1.12\"\n+ },\n \"husky\": {\n \"hooks\": {\n \"commit-msg\": \"commitlint -E HUSKY_GIT_PARAMS\"\n"}
fix: make `_stopAnimation` clear keys from the prop cache This ensures that future `_diff` calls return true, which is required for starting animations.
0e7d65d367fc3d6af7555e079f6306591972d0d7
fix
https://github.com/pmndrs/react-spring/commit/0e7d65d367fc3d6af7555e079f6306591972d0d7
make `_stopAnimation` clear keys from the prop cache This ensures that future `_diff` calls return true, which is required for starting animations.
{"Controller.ts": "@@ -573,11 +573,20 @@ class Controller<State extends object = any> {\n animatedValues = toArray(animated.getPayload() as any)\n }\n \n+ // Replace the animation config with a lighter object\n this.animations[key] = { key, animated, animatedValues } as any\n+\n+ // Tell the frameloop: \"these animations are done\"\n animatedValues.forEach(v => (v.done = true))\n \n- // Prevent delayed updates to this key.\n+ // Prevent delayed updates to this key\n this.timestamps['to.' + key] = now()\n+ this.timestamps['from.' + key] = now()\n+\n+ // Clear this key from the prop cache, so future diffs are guaranteed\n+ const { to, from } = this.props\n+ if (is.obj(to)) delete to[key]\n+ if (from) delete from[key]\n }\n }\n \n"}
refactor(core): remove `persist/remove/flush` methods from `EntityRepository` (#4130) Following methods are no longer available on the `EntityRepository` instance. - `persist` - `persistAndFlush` - `remove` - `removeAndFlush` - `flush` They were confusing as they gave a false sense of working with a scoped context (e.g. only with a `User` type), while in fact, they were only shortcuts for the same methods of underlying `EntityManager`. You should work with the `EntityManager` directly instead of using a repository when it comes to entity persistence, repositories should be treated as an extension point for custom logic (e.g. wrapping query builder usage). ```diff -userRepository.persist(user); -await userRepository.flush(); +em.persist(user); +await em.flush(); ``` > Alternatively, you can use the `repository.getEntityManager()` method to access those methods directly on the `EntityManager`. If you want to keep those methods on the repository level, you can define custom base repository and use it globally: ```ts import { EntityManager, EntityRepository, AnyEntity } from '@mikro-orm/mysql'; export class ExtendedEntityRepository<T extends object> extends EntityRepository<T> { persist(entity: AnyEntity | AnyEntity[]): EntityManager { return this.em.persist(entity); } async persistAndFlush(entity: AnyEntity | AnyEntity[]): Promise<void> { await this.em.persistAndFlush(entity); } remove(entity: AnyEntity): EntityManager { return this.em.remove(entity); } async removeAndFlush(entity: AnyEntity): Promise<void> { await this.em.removeAndFlush(entity); } async flush(): Promise<void> { return this.em.flush(); } } ``` And specify it in the ORM config: ```ts MikroORM.init({ entityRepository: () => ExtendedEntityRepository, }) ``` You might as well want to use the `EntityRepositoryType` symbol, possibly in a custom base entity. Related: #3989
e649480308cf367b73cda79ab5791a9dd3247fdd
refactor
https://github.com/mikro-orm/mikro-orm/commit/e649480308cf367b73cda79ab5791a9dd3247fdd
remove `persist/remove/flush` methods from `EntityRepository` (#4130) Following methods are no longer available on the `EntityRepository` instance. - `persist` - `persistAndFlush` - `remove` - `removeAndFlush` - `flush` They were confusing as they gave a false sense of working with a scoped context (e.g. only with a `User` type), while in fact, they were only shortcuts for the same methods of underlying `EntityManager`. You should work with the `EntityManager` directly instead of using a repository when it comes to entity persistence, repositories should be treated as an extension point for custom logic (e.g. wrapping query builder usage). ```diff -userRepository.persist(user); -await userRepository.flush(); +em.persist(user); +await em.flush(); ``` > Alternatively, you can use the `repository.getEntityManager()` method to access those methods directly on the `EntityManager`. If you want to keep those methods on the repository level, you can define custom base repository and use it globally: ```ts import { EntityManager, EntityRepository, AnyEntity } from '@mikro-orm/mysql'; export class ExtendedEntityRepository<T extends object> extends EntityRepository<T> { persist(entity: AnyEntity | AnyEntity[]): EntityManager { return this.em.persist(entity); } async persistAndFlush(entity: AnyEntity | AnyEntity[]): Promise<void> { await this.em.persistAndFlush(entity); } remove(entity: AnyEntity): EntityManager { return this.em.remove(entity); } async removeAndFlush(entity: AnyEntity): Promise<void> { await this.em.removeAndFlush(entity); } async flush(): Promise<void> { return this.em.flush(); } } ``` And specify it in the ORM config: ```ts MikroORM.init({ entityRepository: () => ExtendedEntityRepository, }) ``` You might as well want to use the `EntityRepositoryType` symbol, possibly in a custom base entity. Related: #3989
{"repositories.md": "@@ -1,15 +1,15 @@\n ---\n-title: Using EntityRepository instead of EntityManager\n-sidebar_label: Entity Repository\n+title: Entity Repository\n ---\n \n-Entity Repositories are thin layers on top of `EntityManager`. They act as an extension point, so we can add custom methods, or even alter the existing ones. The default, `EntityRepository` implementation just forwards the calls to underlying `EntityManager` instance.\n+Entity Repositories are thin layers on top of `EntityManager`. They act as an extension point, so you can add custom methods, or even alter the existing ones. The default `EntityRepository` implementation just forwards the calls to underlying `EntityManager` instance.\n \n-> `EntityRepository` class carries the entity type, so we do not have to pass it to every `find` or `findOne` calls.\n+> `EntityRepository` class carries the entity type, so you do not have to pass it to every `find` or `findOne` calls.\n \n ```ts\n const booksRepository = em.getRepository(Book);\n \n+// same as `em.find(Book, { author: '...' }, { ... })`\n const books = await booksRepository.find({ author: '...' }, {\n populate: ['author'],\n limit: 1,\n@@ -20,13 +20,11 @@ const books = await booksRepository.find({ author: '...' }, {\n console.log(books); // Book[]\n ```\n \n-Note that there is no such thing as \"flushing repository\" - it is just a shortcut to `em.flush()`. In other words, we always flush the whole Unit of Work, not just a single entity that this repository represents.\n-\n ## Custom Repository\n \n :::info\n \n-Since v4, we need to make sure we are working with correctly typed `EntityRepository` to have access to driver specific methods (like `createQueryBuilder()`). Use the one exported from your driver package.\n+Since v4, you need to make sure you are working with correctly typed `EntityRepository` to have access to driver specific methods (like `createQueryBuilder()`). 
Use the one exported from your driver package.\n \n :::\n \n@@ -56,13 +54,13 @@ export class Author {\n \n > `@Repository()` decorator has been removed in v5, use `@Entity({ repository: () => MyRepository })` instead.\n \n-Note that we need to pass that repository reference inside a callback so we will not run into circular dependency issues when using entity references inside that repository.\n+Note that you need to pass that repository reference inside a callback so you will not run into circular dependency issues when using entity references inside that repository.\n \n-Now we can access our custom repository via `em.getRepository()` method.\n+Now you can access your custom repository via `em.getRepository()` method.\n \n ### Inferring custom repository type\n \n-To have the `em.getRepository()` method return correctly typed custom repository instead of the generic `EntityRepository<T>`, we can use `EntityRepositoryType` symbol:\n+To have the `em.getRepository()` method return correctly typed custom repository instead of the generic `EntityRepository<T>`, you can use `EntityRepositoryType` symbol:\n \n ```ts\n @Entity({ repository: () => AuthorRepository })\n@@ -75,6 +73,58 @@ export class Author {\n const repo = em.getRepository(Author); // repo has type AuthorRepository\n ```\n \n-> We can also register custom base repository (for all entities where we do not specify `repository`) globally, via `MikroORM.init({ entityRepository: CustomBaseRepository })`.\n+> You can also register custom base repository (for all entities where you do not specify `repository`) globally, via `MikroORM.init({ entityRepository: CustomBaseRepository })`.\n+\n+## Removed methods from `EntityRepository` interface\n+\n+Following methods are no longer available on the `EntityRepository` instance since v6:\n+\n+- `persist`\n+- `persistAndFlush`\n+- `remove`\n+- `removeAndFlush`\n+- `flush`\n+\n+They were confusing as they gave a false sense of working with a scoped context (e.g. only with a `User` type), while in fact, they were only shortcuts for the same methods of underlying `EntityManager`. You should work with the `EntityManager` directly instead of using a repository when it comes to entity persistence, repositories should be treated as an extension point for custom logic (e.g. 
wrapping query builder usage).\n+\n+> Alternatively, you can use the `repository.getEntityManager()` method to access those methods directly on the `EntityManager`.\n+\n+If you want to keep those methods on repository level, you can define custom base repository and use it globally:\n+\n+```ts\n+import { EntityManager, EntityRepository, AnyEntity } from '@mikro-orm/mysql';\n+\n+export class ExtendedEntityRepository<T extends object> extends EntityRepository<T> {\n+\n+ persist(entity: AnyEntity | AnyEntity[]): EntityManager {\n+ return this.em.persist(entity);\n+ }\n+\n+ async persistAndFlush(entity: AnyEntity | AnyEntity[]): Promise<void> {\n+ await this.em.persistAndFlush(entity);\n+ }\n+\n+ remove(entity: AnyEntity): EntityManager {\n+ return this.em.remove(entity);\n+ }\n+\n+ async removeAndFlush(entity: AnyEntity): Promise<void> {\n+ await this.em.removeAndFlush(entity);\n+ }\n+\n+ async flush(): Promise<void> {\n+ return this.em.flush();\n+ }\n+\n+}\n+```\n+\n+And specify it in the ORM config:\n+\n+```ts\n+MikroORM.init({\n+ entityRepository: () => ExtendedEntityRepository,\n+})\n+```\n \n-For more examples, take a look at [`tests/EntityManager.mongo.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts) or [`tests/EntityManager.mysql.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mysql.test.ts).\n+You might as well want to use the `EntityRepositoryType` symbol, possibly in a custom base entity.\n", "upgrading-v5-to-v6.md": "@@ -31,6 +31,67 @@ const email = book.author.email; // ok, selected\n const name = book.author.name; // fail, not selected\n ```\n \n+## Removed methods from `EntityRepository`\n+\n+Following methods are no longer available on the `EntityRepository` instance. \n+\n+- `persist`\n+- `persistAndFlush`\n+- `remove`\n+- `removeAndFlush`\n+- `flush`\n+\n+They were confusing as they gave a false sense of working with a scoped context (e.g. only with a `User` type), while in fact, they were only shortcuts for the same methods of underlying `EntityManager`. You should work with the `EntityManager` directly instead of using a repository when it comes to entity persistence, repositories should be treated as an extension point for custom logic (e.g. 
wrapping query builder usage).\n+\n+```diff\n+-userRepository.persist(user);\n+-await userRepository.flush();\n++em.persist(user);\n++await em.flush();\n+```\n+\n+> Alternatively, you can use the `repository.getEntityManager()` method to access those methods directly on the `EntityManager`.\n+\n+If you want to keep those methods on repository level, you can define custom base repository and use it globally:\n+\n+```ts\n+import { EntityManager, EntityRepository, AnyEntity } from '@mikro-orm/mysql';\n+\n+export class ExtendedEntityRepository<T extends object> extends EntityRepository<T> {\n+\n+ persist(entity: AnyEntity | AnyEntity[]): EntityManager {\n+ return this.em.persist(entity);\n+ }\n+\n+ async persistAndFlush(entity: AnyEntity | AnyEntity[]): Promise<void> {\n+ await this.em.persistAndFlush(entity);\n+ }\n+\n+ remove(entity: AnyEntity): EntityManager {\n+ return this.em.remove(entity);\n+ }\n+\n+ async removeAndFlush(entity: AnyEntity): Promise<void> {\n+ await this.em.removeAndFlush(entity);\n+ }\n+\n+ async flush(): Promise<void> {\n+ return this.em.flush();\n+ }\n+\n+}\n+```\n+\n+And specify it in the ORM config:\n+\n+```ts\n+MikroORM.init({\n+ entityRepository: () => ExtendedEntityRepository,\n+})\n+```\n+\n+You might as well want to use the `EntityRepositoryType` symbol, possibly in a custom base entity.\n+\n ## Removal of static require calls\n \n There were some places where we did a static `require()` call, e.g. when loading the driver implementation based on the `type` option. Those places were problematic for bundlers like webpack, as well as new school build systems like vite.\n", "EntityRepository.ts": "@@ -1,6 +1,6 @@\n import type { CreateOptions, EntityManager, MergeOptions } from '../EntityManager';\n import type { AssignOptions } from './EntityAssigner';\n-import type { EntityData, EntityName, AnyEntity, Primary, Loaded, FilterQuery, EntityDictionary, AutoPath, RequiredEntityData, Ref } from '../typings';\n+import type { EntityData, EntityName, Primary, Loaded, FilterQuery, EntityDictionary, AutoPath, RequiredEntityData, Ref } from '../typings';\n import type {\n CountOptions,\n DeleteOptions,\n@@ -20,29 +20,9 @@ import type { Cursor } from '../utils/Cursor';\n \n export class EntityRepository<Entity extends object> {\n \n- constructor(protected readonly _em: EntityManager,\n+ constructor(protected readonly em: EntityManager,\n protected readonly entityName: EntityName<Entity>) { }\n \n- /**\n- * Tells the EntityManager to make an instance managed and persistent.\n- * The entity will be entered into the database at or before transaction commit or as a result of the flush operation.\n- *\n- * @deprecated this method will be removed in v6, you should work with the EntityManager instead\n- */\n- persist(entity: AnyEntity | AnyEntity[]): EntityManager {\n- return this.getEntityManager().persist(entity);\n- }\n-\n- /**\n- * Persists your entity immediately, flushing all not yet persisted changes to the database too.\n- * Equivalent to `em.persist(e).flush()`.\n- *\n- * @deprecated this method will be removed in v6, you should work with the EntityManager instead\n- */\n- async persistAndFlush(entity: AnyEntity | AnyEntity[]): Promise<void> {\n- await this.getEntityManager().persistAndFlush(entity);\n- }\n-\n /**\n * Finds first entity matching your `where` query.\n */\n@@ -161,40 +141,6 @@ export class EntityRepository<Entity extends object> {\n return this.getEntityManager().find<Entity, Hint, Fields>(this.entityName, {} as FilterQuery<Entity>, options);\n }\n \n- /**\n- 
* Marks entity for removal.\n- * A removed entity will be removed from the database at or before transaction commit or as a result of the flush operation.\n- *\n- * To remove entities by condition, use `em.nativeDelete()`.\n- *\n- * @deprecated this method will be removed in v6, you should work with the EntityManager instead\n- */\n- remove(entity: AnyEntity): EntityManager {\n- return this.getEntityManager().remove(entity);\n- }\n-\n- /**\n- * Removes an entity instance immediately, flushing all not yet persisted changes to the database too.\n- * Equivalent to `em.remove(e).flush()`\n- *\n- * @deprecated this method will be removed in v6, you should work with the EntityManager instead\n- */\n- async removeAndFlush(entity: AnyEntity): Promise<void> {\n- await this.getEntityManager().removeAndFlush(entity);\n- }\n-\n- /**\n- * Flushes all changes to objects that have been queued up to now to the database.\n- * This effectively synchronizes the in-memory state of managed objects with the database.\n- * This method is a shortcut for `em.flush()`, in other words, it will flush the whole UoW,\n- * not just entities registered via this particular repository.\n- *\n- * @deprecated this method will be removed in v6, you should work with the EntityManager instead\n- */\n- async flush(): Promise<void> {\n- return this.getEntityManager().flush();\n- }\n-\n /**\n * @inheritDoc EntityManager.insert\n */\n@@ -305,13 +251,6 @@ export class EntityRepository<Entity extends object> {\n return this.getEntityManager().count<Entity, Hint>(this.entityName, where, options);\n }\n \n- /**\n- * @deprecated this method will be removed in v6, use the public `getEntityManager()` method instead\n- */\n- protected get em(): EntityManager {\n- return this._em;\n- }\n-\n getEntityName(): string {\n return Utils.className(this.entityName);\n }\n@@ -320,7 +259,7 @@ export class EntityRepository<Entity extends object> {\n * Returns the underlying EntityManager instance\n */\n getEntityManager(): EntityManager {\n- return this._em;\n+ return this.em;\n }\n \n protected validateRepositoryType(entities: Entity[] | Entity, method: string) {\n", "SqlEntityRepository.ts": "@@ -5,9 +5,9 @@ import type { QueryBuilder } from './query';\n \n export class SqlEntityRepository<T extends object> extends EntityRepository<T> {\n \n- constructor(protected override readonly _em: SqlEntityManager,\n+ constructor(protected override readonly em: SqlEntityManager,\n entityName: EntityName<T>) {\n- super(_em, entityName);\n+ super(em, entityName);\n }\n \n /**\n@@ -28,21 +28,14 @@ export class SqlEntityRepository<T extends object> extends EntityRepository<T> {\n * Returns configured knex instance.\n */\n getKnex(type?: ConnectionType): Knex {\n- return this.getEntityManager().getConnection(type).getKnex();\n+ return this.getEntityManager().getKnex();\n }\n \n /**\n * @inheritDoc\n */\n override getEntityManager(): SqlEntityManager {\n- return this._em;\n- }\n-\n- /**\n- * @inheritDoc\n- */\n- protected override get em(): SqlEntityManager {\n- return this._em;\n+ return this.em;\n }\n \n }\n", "MongoEntityRepository.ts": "@@ -4,9 +4,9 @@ import type { MongoEntityManager } from './MongoEntityManager';\n \n export class MongoEntityRepository<T extends object> extends EntityRepository<T> {\n \n- constructor(protected override readonly _em: MongoEntityManager,\n+ constructor(protected override readonly em: MongoEntityManager,\n entityName: EntityName<T>) {\n- super(_em, entityName);\n+ super(em, entityName);\n }\n \n /**\n@@ -24,14 +24,7 @@ export 
class MongoEntityRepository<T extends object> extends EntityRepository<T>\n * @inheritDoc\n */\n override getEntityManager(): MongoEntityManager {\n- return this._em;\n- }\n-\n- /**\n- * @inheritDoc\n- */\n- protected override get em(): MongoEntityManager {\n- return this._em;\n+ return this.em;\n }\n \n }\n", "EntityManager.mariadb.test.ts": "@@ -126,11 +126,10 @@ describe('EntityManagerMariaDb', () => {\n book3.createdAt = new Date(Date.now() + 3);\n book3.publisher = wrap(publisher).toReference();\n \n- const repo = orm.em.getRepository(Book2);\n- repo.persist(book1);\n- repo.persist(book2);\n- repo.persist(book3);\n- await repo.flush();\n+ orm.em.persist(book1);\n+ orm.em.persist(book2);\n+ orm.em.persist(book3);\n+ await orm.em.flush();\n orm.em.clear();\n \n const publisher7k = (await orm.em.getRepository(Publisher2).findOne({ name: '7K publisher' }))!;\n@@ -228,7 +227,7 @@ describe('EntityManagerMariaDb', () => {\n expect(lastBook[0].title).toBe('My Life on The Wall, part 1');\n expect(lastBook[0].author).toBeInstanceOf(Author2);\n expect(wrap(lastBook[0].author).isInitialized()).toBe(true);\n- await orm.em.getRepository(Book2).remove(lastBook[0]).flush();\n+ await orm.em.remove(lastBook[0]).flush();\n });\n \n });\n", "EntityManager.mongo.test.ts": "@@ -59,11 +59,7 @@ describe('EntityManagerMongo', () => {\n const book3 = new Book('My Life on The Wall, part 3', author);\n book3.publisher = publisherRef;\n \n- const repo = orm.em.getRepository(Book);\n- repo.persist(book1);\n- repo.persist(book2);\n- repo.persist(book3);\n- await repo.flush();\n+ await orm.em.persist(publisher).flush();\n orm.em.clear();\n \n const publisher7k = (await orm.em.getRepository(Publisher).findOne({ name: '7K publisher' }))!;\n@@ -186,7 +182,7 @@ describe('EntityManagerMongo', () => {\n expect(lastBook3.length).toBe(1);\n expect(lastBook[0]).toBe(lastBook3[0]);\n \n- await orm.em.getRepository(Book).remove(lastBook[0]).flush();\n+ await orm.em.remove(lastBook[0]).flush();\n });\n \n test('should provide custom repository', async () => {\n@@ -319,7 +315,7 @@ describe('EntityManagerMongo', () => {\n const repo = orm.em.getRepository(Author);\n const author = new Author('name 1', 'email1');\n const author2 = new Author('name 2', 'email2');\n- await repo.persistAndFlush([author, author2]);\n+ await orm.em.persistAndFlush([author, author2]);\n orm.em.clear();\n \n const a2 = await repo.findOne({ name: /^name/ }, {\n@@ -345,7 +341,7 @@ describe('EntityManagerMongo', () => {\n const repo = orm.em.getRepository(Author);\n const author = new Author('name', 'email');\n author.favouriteAuthor = author;\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n const a = await repo.findOne(author);\n const authors = await repo.find({ favouriteAuthor: author });\n expect(a).toBe(author);\n@@ -357,26 +353,26 @@ describe('EntityManagerMongo', () => {\n const author2 = new Author('name2', 'email2');\n const author3 = new Author('name3', 'email3');\n const repo = orm.em.getRepository(Author);\n- repo.persist(author);\n- repo.persist(author2);\n- await repo.removeAndFlush(author);\n+ orm.em.persist(author);\n+ orm.em.persist(author2);\n+ await orm.em.removeAndFlush(author);\n expect([...orm.em.getUnitOfWork().getIdentityMap().keys()]).toEqual([`Author-${author2.id}`]);\n author2.name = 'lol';\n- repo.persist(author2);\n+ orm.em.persist(author2);\n orm.em.remove(author3);\n- await repo.flush();\n+ await orm.em.flush();\n });\n \n test('removing persisted entity will remove it from persist stack 
first', async () => {\n const author = new Author('name', 'email');\n const repo = orm.em.getRepository(Author);\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(orm.em.getUnitOfWork().getById<Author>(Author.name, author.id)).toBeDefined();\n author.name = 'new name';\n- repo.persist(author);\n+ orm.em.persist(author);\n orm.em.remove(author);\n expect(orm.em.getUnitOfWork().getById<Author>(Author.name, author.id)).toBeDefined();\n- await repo.flush();\n+ await orm.em.flush();\n expect(orm.em.getUnitOfWork().getById<Author>(Author.name, author.id)).toBeUndefined();\n expect(orm.em.getUnitOfWork().getIdentityMap()).toEqual({\n registry: new Map([\n@@ -409,7 +405,7 @@ describe('EntityManagerMongo', () => {\n test('removing persisted entity via PK', async () => {\n const author = new Author('name', 'email');\n const repo = orm.em.getRepository(Author);\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n orm.em.clear();\n \n const mock = mockLogger(orm);\n@@ -708,7 +704,7 @@ describe('EntityManagerMongo', () => {\n test('findOne by id', async () => {\n const authorRepository = orm.em.getRepository(Author);\n const jon = new Author('Jon Snow', '[email protected]');\n- await authorRepository.persistAndFlush(jon);\n+ await orm.em.persistAndFlush(jon);\n \n orm.em.clear();\n let author = (await authorRepository.findOne(jon._id))!;\n@@ -1371,26 +1367,26 @@ describe('EntityManagerMongo', () => {\n expect(author.versionAsString).toBeUndefined();\n expect(author.hookTest).toBe(false);\n \n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.id).not.toBeNull();\n expect(author.version).toBe(1);\n expect(author.versionAsString).toBe('v1');\n expect(author.hookTest).toBe(true);\n \n author.name = 'John Snow';\n- await repo.flush();\n+ await orm.em.flush();\n expect(author.version).toBe(2);\n expect(author.versionAsString).toBe('v2');\n \n expect(Author.beforeDestroyCalled).toBe(0);\n expect(Author.afterDestroyCalled).toBe(0);\n- await repo.removeAndFlush(author);\n+ await orm.em.removeAndFlush(author);\n expect(Author.beforeDestroyCalled).toBe(1);\n expect(Author.afterDestroyCalled).toBe(1);\n \n const author2 = new Author('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author2);\n- await repo.removeAndFlush(author2);\n+ await orm.em.persistAndFlush(author2);\n+ await orm.em.removeAndFlush(author2);\n expect(Author.beforeDestroyCalled).toBe(2);\n expect(Author.afterDestroyCalled).toBe(2);\n });\n@@ -1406,7 +1402,7 @@ describe('EntityManagerMongo', () => {\n test('trying to populate non-existing or non-reference property will throw', async () => {\n const repo = orm.em.getRepository(Author);\n const author = new Author('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n orm.em.clear();\n \n await expect(repo.findAll({ populate: ['tests'] as never })).rejects.toThrowError(`Entity 'Author' does not have property 'tests'`);\n@@ -1421,7 +1417,7 @@ describe('EntityManagerMongo', () => {\n let t3 = Test.create('t3');\n await orm.em.persistAndFlush([t1, t2, t3]);\n publisher.tests.add(t2, t1, t3);\n- await repo.persistAndFlush(publisher);\n+ await orm.em.persistAndFlush(publisher);\n orm.em.clear();\n \n const ent = (await repo.findOne(publisher.id))!;\n@@ -1433,7 +1429,7 @@ describe('EntityManagerMongo', () => {\n \n [t1, t2, t3] = ent.tests.getItems();\n ent.tests.set([t3, t2, t1]);\n- await repo.flush();\n+ await 
orm.em.flush();\n orm.em.clear();\n \n const ent1 = (await repo.findOne(publisher.id))!;\n@@ -1484,10 +1480,10 @@ describe('EntityManagerMongo', () => {\n await expect(author.updatedAt).toBeDefined();\n // allow 1 ms difference as updated time is recalculated when persisting\n await expect(+author.updatedAt - +author.createdAt!).toBeLessThanOrEqual(1);\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n \n author.name = 'name1';\n- await repo.flush();\n+ await orm.em.flush();\n await expect(author.createdAt).toBeDefined();\n await expect(author.updatedAt).toBeDefined();\n await expect(author.updatedAt).not.toEqual(author.createdAt);\n", "EntityManager.mysql.test.ts": "@@ -185,7 +185,7 @@ describe('EntityManagerMySql', () => {\n const author = new Author2('name', 'email');\n author.termsAccepted = true;\n author.favouriteAuthor = author;\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n const a = await repo.findOne(author);\n \n const authors = await repo.find({ favouriteAuthor: author });\n@@ -246,10 +246,10 @@ describe('EntityManagerMySql', () => {\n test('should work with boolean values', async () => {\n const repo = orm.em.getRepository(Author2);\n const author = new Author2('name', 'email');\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.termsAccepted).toBe(false);\n author.termsAccepted = true;\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.termsAccepted).toBe(true);\n orm.em.clear();\n \n@@ -258,7 +258,7 @@ describe('EntityManagerMySql', () => {\n const a2 = (await repo.findOne({ termsAccepted: true }))!;\n expect(a2).not.toBeNull();\n a2.termsAccepted = false;\n- await repo.persistAndFlush(a2);\n+ await orm.em.persistAndFlush(a2);\n orm.em.clear();\n \n const a3 = (await repo.findOne({ termsAccepted: false }))!;\n@@ -470,11 +470,10 @@ describe('EntityManagerMySql', () => {\n book3.createdAt = new Date(Date.now() + 3);\n book3.publisher = wrap(publisher).toReference();\n \n- const repo = orm.em.getRepository(Book2);\n- repo.persist(book1);\n- repo.persist(book2);\n- repo.persist(book3);\n- await repo.flush();\n+ orm.em.persist(book1);\n+ orm.em.persist(book2);\n+ orm.em.persist(book3);\n+ await orm.em.flush();\n orm.em.clear();\n \n const publisher7k = (await orm.em.getRepository(Publisher2).findOne({ name: '7K publisher' }))!;\n@@ -583,7 +582,7 @@ describe('EntityManagerMySql', () => {\n expect(lastBook[0].title).toBe('My Life on The Wall, part 1');\n expect(lastBook[0].author).toBeInstanceOf(Author2);\n expect(wrap(lastBook[0].author).isInitialized()).toBe(true);\n- await orm.em.getRepository(Book2).remove(lastBook[0]).flush();\n+ await orm.em.remove(lastBook[0]).flush();\n });\n \n test('json properties', async () => {\n@@ -944,7 +943,7 @@ describe('EntityManagerMySql', () => {\n test('findOne by id', async () => {\n const authorRepository = orm.em.getRepository(Author2);\n const jon = new Author2('Jon Snow', '[email protected]');\n- await authorRepository.persistAndFlush(jon);\n+ await orm.em.persistAndFlush(jon);\n \n orm.em.clear();\n let author = (await authorRepository.findOne(jon.id))!;\n@@ -1404,7 +1403,7 @@ describe('EntityManagerMySql', () => {\n test('trying to populate non-existing or non-reference property will throw', async () => {\n const repo = orm.em.getRepository(Author2);\n const author = new Author2('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author);\n+ await 
orm.em.persistAndFlush(author);\n orm.em.clear();\n \n await expect(repo.findAll({ populate: ['tests'] as never })).rejects.toThrowError(`Entity 'Author2' does not have property 'tests'`);\n@@ -1419,7 +1418,7 @@ describe('EntityManagerMySql', () => {\n let t3 = Test2.create('t3');\n await orm.em.persistAndFlush([t1, t2, t3]);\n publisher.tests.add(t2, t1, t3);\n- await repo.persistAndFlush(publisher);\n+ await orm.em.persistAndFlush(publisher);\n orm.em.clear();\n \n const ent = (await repo.findOne(publisher.id, { populate: ['tests'] }))!;\n@@ -1431,7 +1430,7 @@ describe('EntityManagerMySql', () => {\n \n [t1, t2, t3] = ent.tests.getItems();\n ent.tests.set([t3, t2, t1]);\n- await repo.flush();\n+ await orm.em.flush();\n orm.em.clear();\n \n const ent1 = (await repo.findOne(publisher.id, { populate: ['tests'] }))!;\n@@ -1684,10 +1683,10 @@ describe('EntityManagerMySql', () => {\n await expect(author.updatedAt).toBeDefined();\n // allow 1 ms difference as updated time is recalculated when persisting\n await expect(+author.updatedAt - +author.createdAt).toBeLessThanOrEqual(1);\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n \n author.name = 'name1';\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n await expect(author.createdAt).toBeDefined();\n await expect(author.updatedAt).toBeDefined();\n await expect(author.updatedAt).not.toEqual(author.createdAt);\n", "EntityManager.postgre.test.ts": "@@ -202,7 +202,7 @@ describe('EntityManagerPostgre', () => {\n const author = new Author2('name', 'email');\n author.termsAccepted = true;\n author.favouriteAuthor = author;\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n const a = await repo.findOne(author);\n const authors = await repo.find({ favouriteAuthor: author });\n expect(a).toBe(author);\n@@ -331,7 +331,7 @@ describe('EntityManagerPostgre', () => {\n test('collection loads items after savepoint should not fail', async () => {\n const publisher = new Publisher2('7K publisher', PublisherType.GLOBAL);\n const book = new Book2('My Life on The Wall, part 1', new Author2('name', 'email'));\n- book.publisher = wrap(publisher).toReference();\n+ book.publisher = ref(publisher);\n \n const author = new Author2('Bartleby', '[email protected]');\n author.books.add(book);\n@@ -417,17 +417,16 @@ describe('EntityManagerPostgre', () => {\n const publisher = new Publisher2('7K publisher', PublisherType.GLOBAL);\n \n const book1 = new Book2('My Life on The Wall, part 1', author);\n- book1.publisher = wrap(publisher).toReference();\n+ book1.publisher = ref(publisher);\n const book2 = new Book2('My Life on The Wall, part 2', author);\n- book2.publisher = wrap(publisher).toReference();\n+ book2.publisher = ref(publisher);\n const book3 = new Book2('My Life on The Wall, part 3', author);\n- book3.publisher = wrap(publisher).toReference();\n+ book3.publisher = ref(publisher);\n \n- const repo = orm.em.getRepository(Book2);\n- repo.persist(book1);\n- repo.persist(book2);\n- repo.persist(book3);\n- await repo.flush();\n+ orm.em.persist(book1);\n+ orm.em.persist(book2);\n+ orm.em.persist(book3);\n+ await orm.em.flush();\n orm.em.clear();\n \n const publisher7k = (await orm.em.getRepository(Publisher2).findOne({ name: '7K publisher' }))!;\n@@ -529,7 +528,7 @@ describe('EntityManagerPostgre', () => {\n expect(lastBook[0].title).toBe('My Life on The Wall, part 1');\n expect(lastBook[0].author).toBeInstanceOf(Author2);\n 
expect(wrap(lastBook[0].author).isInitialized()).toBe(true);\n- await orm.em.getRepository(Book2).remove(lastBook[0]).flush();\n+ await orm.em.remove(lastBook[0]).flush();\n });\n \n test('json properties', async () => {\n@@ -945,7 +944,7 @@ describe('EntityManagerPostgre', () => {\n test('findOne by id', async () => {\n const authorRepository = orm.em.getRepository(Author2);\n const jon = new Author2('Jon Snow', '[email protected]');\n- await authorRepository.persistAndFlush(jon);\n+ await orm.em.persistAndFlush(jon);\n \n orm.em.clear();\n let author = (await authorRepository.findOne(jon.id))!;\n@@ -963,7 +962,7 @@ describe('EntityManagerPostgre', () => {\n const publisher = new Publisher2('Publisher');\n const god = new Author2('God', '[email protected]');\n const bible = new Book2('Bible', god);\n- bible.publisher = wrap(publisher).toReference();\n+ bible.publisher = ref(publisher);\n await orm.em.persistAndFlush(bible);\n \n let jon = new Author2('Jon Snow', '[email protected]');\n@@ -1393,25 +1392,25 @@ describe('EntityManagerPostgre', () => {\n expect(author.versionAsString).toBeUndefined();\n expect(author.code).toBe('[email protected] - Jon Snow');\n \n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.id).toBeDefined();\n expect(author.version).toBe(1);\n expect(author.versionAsString).toBe('v1');\n \n author.name = 'John Snow';\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.version).toBe(2);\n expect(author.versionAsString).toBe('v2');\n \n expect(Author2.beforeDestroyCalled).toBe(0);\n expect(Author2.afterDestroyCalled).toBe(0);\n- await repo.removeAndFlush(author);\n+ await orm.em.removeAndFlush(author);\n expect(Author2.beforeDestroyCalled).toBe(1);\n expect(Author2.afterDestroyCalled).toBe(1);\n \n const author2 = new Author2('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author2);\n- await repo.removeAndFlush(author2);\n+ await orm.em.persistAndFlush(author2);\n+ await orm.em.removeAndFlush(author2);\n expect(Author2.beforeDestroyCalled).toBe(2);\n expect(Author2.afterDestroyCalled).toBe(2);\n });\n@@ -1435,7 +1434,7 @@ describe('EntityManagerPostgre', () => {\n test('trying to populate non-existing or non-reference property will throw', async () => {\n const repo = orm.em.getRepository(Author2);\n const author = new Author2('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n orm.em.clear();\n \n await expect(repo.findAll({ populate: ['tests'] as never })).rejects.toThrowError(`Entity 'Author2' does not have property 'tests'`);\n@@ -1450,7 +1449,7 @@ describe('EntityManagerPostgre', () => {\n const t3 = Test2.create('t3');\n await orm.em.persistAndFlush([t1, t2, t3]);\n publisher.tests.add(t2, t1, t3);\n- await repo.persistAndFlush(publisher);\n+ await orm.em.persistAndFlush(publisher);\n orm.em.clear();\n \n const ent = (await repo.findOne(publisher.id, { populate: ['tests'] }))!;\n@@ -1468,10 +1467,10 @@ describe('EntityManagerPostgre', () => {\n await expect(author.updatedAt).toBeDefined();\n // allow 1 ms difference as updated time is recalculated when persisting\n await expect(+author.updatedAt - +author.createdAt).toBeLessThanOrEqual(1);\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n \n author.name = 'name1';\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n await expect(author.createdAt).toBeDefined();\n await 
expect(author.updatedAt).toBeDefined();\n await expect(author.updatedAt).not.toEqual(author.createdAt);\n", "EntityManager.sqlite.test.ts": "@@ -94,7 +94,7 @@ describe('EntityManagerSqlite', () => {\n test('should convert entity to PK when trying to search by entity', async () => {\n const repo = orm.em.getRepository<any>(Author3);\n const author = new Author3('name', 'email');\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n const a = await repo.findOne(author);\n const authors = await repo.find({ id: author });\n expect(a).toBe(author);\n@@ -229,11 +229,10 @@ describe('EntityManagerSqlite', () => {\n const book3 = new Book3('My Life on The Wall, part 3', author);\n book3.publisher = publisher;\n \n- const repo = orm.em.getRepository(Book3);\n- repo.persist(book1);\n- repo.persist(book2);\n- repo.persist(book3);\n- await repo.flush();\n+ orm.em.persist(book1);\n+ orm.em.persist(book2);\n+ orm.em.persist(book3);\n+ await orm.em.flush();\n orm.em.clear();\n \n const publisher7k = (await orm.em.getRepository<any>(Publisher3).findOne({ name: '7K publisher' }))!;\n@@ -326,7 +325,7 @@ describe('EntityManagerSqlite', () => {\n expect(lastBook[0].title).toBe('My Life on The Wall, part 1');\n expect(lastBook[0].author).toBeInstanceOf(Author3);\n expect(lastBook[0].author.isInitialized()).toBe(true);\n- await orm.em.getRepository(Book3).remove(lastBook[0]).flush();\n+ await orm.em.remove(lastBook[0]).flush();\n });\n \n test('findOne should initialize entity that is already in IM', async () => {\n@@ -546,7 +545,7 @@ describe('EntityManagerSqlite', () => {\n test('findOne by id', async () => {\n const authorRepository = orm.em.getRepository<any>(Author3);\n const jon = new Author3('Jon Snow', '[email protected]');\n- await authorRepository.persistAndFlush(jon);\n+ await orm.em.persistAndFlush(jon);\n \n orm.em.clear();\n let author = (await authorRepository.findOne(jon.id))!;\n@@ -759,7 +758,7 @@ describe('EntityManagerSqlite', () => {\n expect(author.baseVersion).toBeUndefined();\n expect(author.baseVersionAsString).toBeUndefined();\n \n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.id).toBeDefined();\n expect(author.version).toBe(1);\n expect(author.versionAsString).toBe('v1');\n@@ -767,7 +766,7 @@ describe('EntityManagerSqlite', () => {\n expect(author.baseVersionAsString).toBe('v1');\n \n author.name = 'John Snow';\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.version).toBe(3);\n expect(author.versionAsString).toBe('v3');\n expect(author.baseVersion).toBe(3);\n@@ -777,15 +776,15 @@ describe('EntityManagerSqlite', () => {\n expect(Author3.afterDestroyCalled).toBe(0);\n expect(BaseEntity4.beforeDestroyCalled).toBe(0);\n expect(BaseEntity4.afterDestroyCalled).toBe(0);\n- await repo.remove(author).flush();\n+ await orm.em.remove(author).flush();\n expect(Author3.beforeDestroyCalled).toBe(2);\n expect(Author3.afterDestroyCalled).toBe(2);\n expect(BaseEntity4.beforeDestroyCalled).toBe(2);\n expect(BaseEntity4.afterDestroyCalled).toBe(2);\n \n const author2 = new Author3('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author2);\n- await repo.remove(author2).flush();\n+ await orm.em.persistAndFlush(author2);\n+ await orm.em.remove(author2).flush();\n expect(Author3.beforeDestroyCalled).toBe(4);\n expect(Author3.afterDestroyCalled).toBe(4);\n expect(BaseEntity4.beforeDestroyCalled).toBe(4);\n@@ -795,7 +794,7 @@ describe('EntityManagerSqlite', () => {\n 
test('trying to populate non-existing or non-reference property will throw', async () => {\n const repo = orm.em.getRepository(Author3);\n const author = new Author3('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n orm.em.clear();\n \n await expect(repo.findAll({ populate: ['tests'] as never })).rejects.toThrowError(`Entity 'Author3' does not have property 'tests'`);\n@@ -810,7 +809,7 @@ describe('EntityManagerSqlite', () => {\n const t3 = Test3.create('t3');\n await orm.em.persist([t1, t2, t3]).flush();\n publisher.tests.add(t2, t1, t3);\n- await repo.persistAndFlush(publisher);\n+ await orm.em.persistAndFlush(publisher);\n orm.em.clear();\n \n const ent = (await repo.findOne(publisher.id, { populate: ['tests'] }))!;\n@@ -828,11 +827,11 @@ describe('EntityManagerSqlite', () => {\n await expect(author.updatedAt).toBeDefined();\n // allow 1 ms difference as updated time is recalculated when persisting\n await expect(+author.updatedAt - +author.createdAt).toBeLessThanOrEqual(1);\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n \n author.name = 'name1';\n await new Promise(resolve => setTimeout(resolve, 10));\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n await expect(author.createdAt).toBeDefined();\n await expect(author.updatedAt).toBeDefined();\n await expect(author.updatedAt).not.toEqual(author.createdAt);\n", "EntityManager.sqlite2.test.ts": "@@ -35,7 +35,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n test('should convert entity to PK when trying to search by entity', async () => {\n const repo = orm.em.getRepository(Author4);\n const author = orm.em.create(Author4, { name: 'name', email: 'email' });\n- await repo.flush();\n+ await orm.em.flush();\n const a = await repo.findOne(author);\n const authors = await repo.find({ id: author.id });\n expect(a).toBe(author);\n@@ -45,7 +45,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n test('hydration with `forceUndefined` converts null values', async () => {\n const repo = orm.em.getRepository(Author4);\n const author = orm.em.create(Author4, { name: 'name', email: 'email' });\n- await repo.flush();\n+ await orm.em.flush();\n orm.em.clear();\n \n const a = await repo.findOneOrFail(author);\n@@ -290,7 +290,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n expect(lastBook[0].title).toBe('My Life on The Wall, part 1');\n expect(lastBook[0].author!.constructor.name).toBe('Author4');\n expect(wrap(lastBook[0].author!).isInitialized()).toBe(true);\n- await orm.em.getRepository(Book4).remove(lastBook[0]).flush();\n+ await orm.em.remove(lastBook[0]).flush();\n });\n \n test('findOne should initialize entity that is already in IM', async () => {\n@@ -557,7 +557,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n test('findOne by id', async () => {\n const authorRepository = orm.em.getRepository(Author4);\n const jon = orm.em.create(Author4, { name: 'Jon Snow', email: '[email protected]' });\n- await authorRepository.persistAndFlush(jon);\n+ await orm.em.persistAndFlush(jon);\n \n orm.em.clear();\n let author = (await authorRepository.findOne(jon.id))!;\n@@ -843,7 +843,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n test('trying to populate non-existing or non-reference property will throw', async () => {\n const repo = 
orm.em.getRepository(Author4);\n const author = orm.em.create(Author4, { name: 'Johny Cash', email: '[email protected]' });\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n orm.em.clear();\n \n await expect(repo.findAll({ populate: ['tests'] as never })).rejects.toThrowError(`Entity 'Author4' does not have property 'tests'`);\n@@ -858,7 +858,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n const t3 = orm.em.create(Test4, { name: 't3' });\n await orm.em.persist([t1, t2, t3]).flush();\n publisher.tests.add(t2, t1, t3);\n- await repo.persistAndFlush(publisher);\n+ await orm.em.persistAndFlush(publisher);\n orm.em.clear();\n \n const ent = (await repo.findOne(publisher.id, { populate: ['tests'] }))!;\n@@ -872,7 +872,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n test('property onUpdate hook (updatedAt field)', async () => {\n const repo = orm.em.getRepository(Author4);\n const author = orm.em.create(Author4, { name: 'name', email: 'email' });\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.createdAt).toBeDefined();\n expect(author.updatedAt).toBeDefined();\n // allow 1 ms difference as updated time is recalculated when persisting\n@@ -880,7 +880,7 @@ describe.each(['sqlite', 'better-sqlite'] as const)('EntityManager (%s)', driver\n \n author.name = 'name1';\n await new Promise(resolve => setTimeout(resolve, 10));\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n await expect(author.createdAt).toBeDefined();\n await expect(author.updatedAt).toBeDefined();\n await expect(author.updatedAt).not.toEqual(author.createdAt);\n", "EntityManager.sqlite3.test.ts": "@@ -1,4 +1,4 @@\n-import { Collection, EntityManager, MikroORM, QueryOrder } from '@mikro-orm/core';\n+import { EntityManager, MikroORM } from '@mikro-orm/core';\n import { SqliteDriver } from '@mikro-orm/sqlite';\n \n import { initORMSqlite3 } from './bootstrap';\n@@ -22,12 +22,12 @@ describe('EntityManagerSqlite fts5 table', () => {\n const book5 = new Book5('My Death in a grass field, part 5');\n \n const repo = orm.em.getRepository(Book5);\n- repo.persist(book1);\n- repo.persist(book2);\n- repo.persist(book3);\n- repo.persist(book4);\n- repo.persist(book5);\n- await repo.flush();\n+ orm.em.persist(book1);\n+ orm.em.persist(book2);\n+ orm.em.persist(book3);\n+ orm.em.persist(book4);\n+ orm.em.persist(book5);\n+ await orm.em.flush();\n orm.em.clear();\n \n expect((await repo.count())!).toBe(5);\n", "EntityRepository.test.ts": "@@ -1,4 +1,3 @@\n-import type { AnyEntity } from '@mikro-orm/core';\n import { Configuration, QueryOrder } from '@mikro-orm/core';\n import type { EntityManager } from '@mikro-orm/knex';\n import { EntityRepository } from '@mikro-orm/knex';\n@@ -8,8 +7,6 @@ import { MongoDriver, MongoEntityRepository } from '@mikro-orm/mongodb';\n \n const methods = {\n getReference: jest.fn(),\n- persist: jest.fn(),\n- persistAndFlush: jest.fn(),\n createQueryBuilder: jest.fn(),\n qb: jest.fn(),\n findOne: jest.fn(),\n@@ -19,9 +16,6 @@ const methods = {\n find: jest.fn(),\n findAndCount: jest.fn(),\n findByCursor: jest.fn(),\n- remove: jest.fn(),\n- removeAndFlush: jest.fn(),\n- flush: jest.fn(),\n canPopulate: jest.fn(),\n populate: jest.fn(),\n count: jest.fn(),\n@@ -49,10 +43,6 @@ describe('EntityRepository', () => {\n repo.getReference('bar');\n expect(methods.getReference.mock.calls[0]).toEqual([Publisher, 'bar', undefined]);\n const e = 
Object.create(Publisher.prototype);\n- repo.persist(e);\n- expect(methods.persist.mock.calls[0]).toEqual([e]);\n- await repo.persistAndFlush(e);\n- expect(methods.persistAndFlush.mock.calls[0]).toEqual([e]);\n await repo.find({ name: 'bar' });\n expect(methods.find.mock.calls[0]).toEqual([Publisher, { name: 'bar' }, undefined]);\n await repo.findAndCount({ name: 'bar' });\n@@ -71,11 +61,6 @@ describe('EntityRepository', () => {\n expect(methods.createQueryBuilder.mock.calls[0]).toEqual([Publisher, undefined]);\n await repo.qb();\n expect(methods.createQueryBuilder.mock.calls[0]).toEqual([Publisher, undefined]);\n- repo.remove(e);\n- expect(methods.remove.mock.calls[0]).toEqual([e]);\n- const entity = {} as AnyEntity;\n- await repo.removeAndFlush(entity);\n- expect(methods.removeAndFlush.mock.calls[0]).toEqual([entity]);\n await repo.create({ name: 'bar' });\n expect(methods.create.mock.calls[0]).toEqual([Publisher, { name: 'bar' }]);\n await repo.assign(e, { name: 'bar' });\n", "events.mysql.test.ts": "@@ -53,14 +53,13 @@ describe('events (mysql)', () => {\n expect(Author2Subscriber.log).toEqual([]);\n Author2.beforeDestroyCalled = 0;\n Author2.afterDestroyCalled = 0;\n- const repo = orm.em.getRepository(Author2);\n const author = new Author2('Jon Snow', '[email protected]');\n expect(author.id).toBeUndefined();\n expect(author.version).toBeUndefined();\n expect(author.versionAsString).toBeUndefined();\n expect(author.hookParams).toHaveLength(0);\n \n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.id).toBeDefined();\n expect(author.version).toBe(1);\n expect(author.versionAsString).toBe('v1');\n@@ -68,7 +67,7 @@ describe('events (mysql)', () => {\n expect(author.hookParams[0].changeSet).toMatchObject({ entity: author, type: 'create', payload: { name: 'Jon Snow' } });\n \n author.name = 'John Snow';\n- await repo.persistAndFlush(author);\n+ await orm.em.persistAndFlush(author);\n expect(author.version).toBe(2);\n expect(author.versionAsString).toBe('v2');\n expect(author.hookParams[2].em).toBe(orm.em);\n@@ -76,13 +75,13 @@ describe('events (mysql)', () => {\n \n expect(Author2.beforeDestroyCalled).toBe(0);\n expect(Author2.afterDestroyCalled).toBe(0);\n- await repo.removeAndFlush(author);\n+ await orm.em.removeAndFlush(author);\n expect(Author2.beforeDestroyCalled).toBe(1);\n expect(Author2.afterDestroyCalled).toBe(1);\n \n const author2 = new Author2('Johny Cash', '[email protected]');\n- await repo.persistAndFlush(author2);\n- await repo.removeAndFlush(author2);\n+ await orm.em.persistAndFlush(author2);\n+ await orm.em.removeAndFlush(author2);\n expect(Author2.beforeDestroyCalled).toBe(2);\n expect(Author2.afterDestroyCalled).toBe(2);\n \n", "full-text-search-tsvector.postgres.test.ts": "@@ -39,18 +39,15 @@ describe('full text search tsvector in postgres', () => {\n afterAll(() => orm.close(true));\n \n test('load entities', async () => {\n- const repo = orm.em.getRepository(Book);\n-\n const book1 = new Book('My Life on The ? Wall, part 1');\n- await repo.persist(book1).flush();\n+ await orm.em.persist(book1).flush();\n \n+ const repo = orm.em.getRepository(Book);\n const fullTextBooks = (await repo.find({ searchableTitle: { $fulltext: 'life wall' } }))!;\n expect(fullTextBooks.length).toBe(1);\n });\n \n test('load entities (multi)', async () => {\n- const repo = orm.em.getRepository(Book);\n-\n const book1 = new Book('My Life on The ? 
Wall, part 1');\n const book2 = new Book('My Life on The Wall, part 2');\n const book3 = new Book('My Life on The Wall, part 3');\n@@ -58,9 +55,9 @@ describe('full text search tsvector in postgres', () => {\n const book5 = new Book('My Life on The House');\n const book6 = new Book(null);\n \n- repo.persist([book1, book2, book3, book4, book5, book6]);\n- await repo.flush();\n+ await orm.em.persist([book1, book2, book3, book4, book5, book6]).flush();\n \n+ const repo = orm.em.getRepository(Book);\n const fullTextBooks = (await repo.find({ searchableTitle: { $fulltext: 'life wall' } }))!;\n expect(fullTextBooks).toHaveLength(3);\n });\n", "GH1902.test.ts": "@@ -90,21 +90,19 @@ describe('GH issue 1902', () => {\n });\n \n test(`GH issue 1902`, async () => {\n- const repoUser = orm.em.getRepository(UserEntity);\n const user = orm.em.create(UserEntity, { name: 'user one', email: 'one@email' });\n- await repoUser.persistAndFlush(user);\n+ await orm.em.flush();\n \n- const repoTenant = orm.em.getRepository(TenantEntity);\n const tenant1 = orm.em.create(TenantEntity, { name: 'tenant one', schema: 'tenant_one' });\n- await repoTenant.persistAndFlush(tenant1);\n+ await orm.em.flush();\n const tenant2 = orm.em.create(TenantEntity, { name: 'tenant two', schema: 'tenant_two' });\n- await repoTenant.persistAndFlush(tenant2);\n+ await orm.em.flush();\n \n const repoUserTenant = orm.em.getRepository(UserTenantEntity);\n- const ut1 = orm.em.create(UserTenantEntity, { user, tenant: tenant1, isActive: true });\n- await repoTenant.persistAndFlush(ut1);\n- const ut2 = orm.em.create(UserTenantEntity, { user, tenant: tenant2, isActive: false });\n- await repoTenant.persistAndFlush(ut2);\n+ orm.em.create(UserTenantEntity, { user, tenant: tenant1, isActive: true });\n+ await orm.em.flush();\n+ orm.em.create(UserTenantEntity, { user, tenant: tenant2, isActive: false });\n+ await orm.em.flush();\n orm.em.clear();\n \n const findOpts = {\n", "GH3988.test.ts": "@@ -89,8 +89,6 @@ afterAll(async () => {\n });\n \n it('should create and persist entity along with child entity', async () => {\n- const parentRepository = orm.em.fork().getRepository(ParentEntity);\n-\n // Create parent\n const parent = new ParentEntity();\n parent.id = new Id(1);\n@@ -104,7 +102,7 @@ it('should create and persist entity along with child entity', async () => {\n parent.children.add(child);\n \n const mock = mockLogger(orm);\n- await parentRepository.persistAndFlush(parent);\n+ await orm.em.persistAndFlush(parent);\n expect(mock.mock.calls).toEqual([\n ['[query] begin'],\n ['[query] insert into `parent_entity` (`id`, `id2`) values (1, 2)'],\n", "GH4057.test.ts": "@@ -20,12 +20,11 @@ test('null dates stay null when fetched', async () => {\n \n await orm.getSchemaGenerator().refreshDatabase();\n \n- const repo = orm.em.fork().getRepository(Test);\n- const c = repo.create({\n+ orm.em.create(Test, {\n id: '123',\n date: undefined,\n });\n- await repo.persistAndFlush(c);\n+ await orm.em.flush();\n \n const entity = await orm.em.fork().findOne(Test, '123');\n \n"}
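A minimal TypeScript sketch of the pattern the test diffs above converge on: persistence calls (persist, persistAndFlush, remove, flush) move from the EntityRepository to the EntityManager, while repositories keep the read-side API (find, findOne). The entity shape, SQLite config, and sample data below are illustrative assumptions for the sketch only, not part of the recorded commits.

```ts
// Editor's sketch (assumed MikroORM v5-style API, as used in the diffs above).
import { Entity, MikroORM, PrimaryKey, Property } from '@mikro-orm/core';

@Entity()
class Author {
  @PrimaryKey()
  id!: number;

  @Property()
  name: string;

  @Property()
  email: string;

  constructor(name: string, email: string) {
    this.name = name;
    this.email = email;
  }
}

async function demo(): Promise<void> {
  // hypothetical in-memory SQLite setup, mirroring the test bootstrap
  const orm = await MikroORM.init({ type: 'sqlite', dbName: ':memory:', entities: [Author] });
  await orm.getSchemaGenerator().createSchema();
  const em = orm.em.fork();

  const author = new Author('name', 'email');
  // before (removed in the diffs): await repo.persistAndFlush(author);
  await em.persistAndFlush(author);            // persistence now goes through the EntityManager

  const repo = em.getRepository(Author);       // repositories still handle reads
  const found = await repo.findOneOrFail({ name: 'name' });

  await em.remove(found).flush();              // removal likewise moves to the EntityManager
  await orm.close(true);
}
```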
build: updated deps
14cc57f7752271d870553e7283798ebcf3fd2e05
build
https://github.com/tsparticles/tsparticles/commit/14cc57f7752271d870553e7283798ebcf3fd2e05
updated deps
{"package.json": "@@ -30,9 +30,9 @@\n \"@tsparticles/prettier-config\": \"^1.9.0\",\n \"@tsparticles/tsconfig\": \"^1.12.0\",\n \"@tsparticles/webpack-plugin\": \"^1.13.0\",\n- \"@typescript-eslint/eslint-plugin\": \"^5.59.0\",\n- \"@typescript-eslint/parser\": \"^5.59.0\",\n- \"eslint\": \"^8.39.0\",\n+ \"@typescript-eslint/eslint-plugin\": \"^5.59.2\",\n+ \"@typescript-eslint/parser\": \"^5.59.2\",\n+ \"eslint\": \"^8.40.0\",\n \"eslint-config-prettier\": \"^8.8.0\",\n \"fs-extra\": \"^11.1.1\",\n \"jsdom\": \"^22.0.0\",\n@@ -108,7 +108,7 @@\n \"tsparticles-updater-twinkle\": \"^2.9.3\",\n \"tsparticles-updater-wobble\": \"^2.9.3\",\n \"typescript\": \"^5.0.4\",\n- \"webpack\": \"^5.80.0\"\n+ \"webpack\": \"^5.82.0\"\n },\n \"devDependencies\": {\n \"@types/eslint\": \"^8.37.0\",\n@@ -116,8 +116,8 @@\n \"@types/jsdom\": \"^21.1.1\",\n \"@types/klaw\": \"^3.0.3\",\n \"@types/lodash\": \"^4.14.194\",\n- \"@types/node\": \"^18.16.0\",\n+ \"@types/node\": \"^20.1.0\",\n \"@types/prettier\": \"^2.7.2\",\n- \"@types/rimraf\": \"^3.0.2\"\n+ \"@types/rimraf\": \"^4.0.5\"\n }\n }\n", "pnpm-lock.yaml": "@@ -5,29 +5,29 @@ importers:\n .:\n devDependencies:\n '@commitlint/cli':\n- specifier: ^17.6.1\n- version: 17.6.1\n+ specifier: ^17.6.3\n+ version: 17.6.3\n '@commitlint/config-conventional':\n- specifier: ^17.6.1\n- version: 17.6.1\n+ specifier: ^17.6.3\n+ version: 17.6.3\n '@nrwl/cli':\n specifier: ^15.9.3\n version: 15.9.3\n '@nrwl/devkit':\n- specifier: ^16.0.3\n- version: 16.0.3([email protected])\n+ specifier: ^16.1.1\n+ version: 16.1.1([email protected])\n '@nrwl/nx-cloud':\n specifier: ^16.0.5\n version: 16.0.5\n '@nrwl/workspace':\n- specifier: ^16.0.3\n- version: 16.0.3\n+ specifier: ^16.1.1\n+ version: 16.1.1\n '@tsparticles/prettier-config':\n specifier: ^1.9.0\n version: 1.9.0\n '@types/node':\n- specifier: ^18.16.3\n- version: 18.16.3\n+ specifier: ^20.1.0\n+ version: 20.1.0\n copyfiles:\n specifier: ^2.4.1\n version: 2.4.1\n@@ -38,11 +38,11 @@ importers:\n specifier: ^4.1.0\n version: 4.1.0\n lerna:\n- specifier: ^6.6.1\n- version: 6.6.1\n+ specifier: ^6.6.2\n+ version: 6.6.2\n nx:\n- specifier: ^16.0.3\n- version: 16.0.3\n+ specifier: ^16.1.1\n+ version: 16.1.1\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -141,8 +141,8 @@ importers:\n version: link:../../updaters/wobble/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -162,14 +162,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -177,11 +177,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email 
protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -190,19 +190,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n bundles/fireworks:\n@@ -251,8 +251,8 @@ importers:\n version: link:../../updaters/strokeColor/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -272,14 +272,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -287,11 +287,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -300,19 +300,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n bundles/full:\n@@ -349,8 +349,8 @@ importers:\n version: link:../../updaters/wobble/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -370,14 +370,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 
9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -385,11 +385,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -398,19 +398,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n bundles/pjs:\n@@ -420,8 +420,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -441,14 +441,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -456,11 +456,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -469,19 +469,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n bundles/slim:\n@@ -584,8 +584,8 @@ importers:\n version: link:../../updaters/strokeColor/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -605,14 +605,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 
5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -620,11 +620,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -633,19 +633,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n demo/electron:\n@@ -661,11 +661,11 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n electron:\n- specifier: ^24.1.2\n- version: 24.1.2\n+ specifier: ^24.2.0\n+ version: 24.2.0\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n \n demo/vanilla:\n devDependencies:\n@@ -673,14 +673,14 @@ importers:\n specifier: ^6.4.0\n version: 6.4.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n ace-builds:\n- specifier: ^1.18.0\n- version: 1.18.0\n+ specifier: ^1.19.0\n+ version: 1.19.0\n bootstrap:\n specifier: ^5.2.3\n version: 5.2.3(@popperjs/[email protected])\n@@ -688,11 +688,11 @@ importers:\n specifier: ^0.6.1\n version: 0.6.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n express:\n specifier: ^4.18.2\n version: 4.18.2\n@@ -979,8 +979,8 @@ importers:\n specifier: 5.0.0\n version: 5.0.0\n sass:\n- specifier: ^1.62.0\n- version: 1.62.0\n+ specifier: ^1.62.1\n+ version: 1.62.1\n tsparticles:\n specifier: ^2.9.3\n version: link:../../bundles/full/dist\n@@ -1021,8 +1021,8 @@ importers:\n engine:\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@skypack/package-check':\n specifier: ^0.2.2\n version: 0.2.2\n@@ -1042,8 +1042,8 @@ importers:\n specifier: ^1.13.0\n version: 1.13.0\n '@types/chai':\n- specifier: ^4.3.4\n- version: 4.3.4\n+ specifier: ^4.3.5\n+ version: 4.3.5\n '@types/jsdom':\n specifier: ^21.1.1\n version: 21.1.1\n@@ -1051,20 +1051,20 @@ importers:\n specifier: 
^10.0.1\n version: 10.0.1\n '@types/node':\n- specifier: ^18.16.0\n- version: 18.16.0\n+ specifier: ^20.1.0\n+ version: 20.1.0\n '@types/webpack-env':\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1075,20 +1075,20 @@ importers:\n specifier: ^4.3.7\n version: 4.3.7\n compare-versions:\n- specifier: ^5.0.3\n- version: 5.0.3\n+ specifier: 6.0.0-rc.1\n+ version: 6.0.0-rc.1\n copyfiles:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n eslint-plugin-jsdoc:\n- specifier: ^43.0.7\n- version: 43.0.7([email protected])\n+ specifier: ^43.2.0\n+ version: 43.2.0([email protected])\n eslint-plugin-tsdoc:\n specifier: ^0.2.17\n version: 0.2.17\n@@ -1124,13 +1124,13 @@ importers:\n version: 0.5.21\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n ts-json-schema-generator:\n specifier: ^1.2.0\n version: 1.2.0\n ts-node:\n specifier: ^10.9.1\n- version: 10.9.1(@types/[email protected])([email protected])\n+ version: 10.9.1(@types/[email protected])([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n@@ -1138,14 +1138,14 @@ importers:\n specifier: ^0.56.0\n version: 0.56.0\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/attract:\n@@ -1155,8 +1155,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1176,14 +1176,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1191,11 +1191,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- 
specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1204,19 +1204,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/bounce:\n@@ -1226,8 +1226,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1247,14 +1247,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1262,11 +1262,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1275,19 +1275,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/bubble:\n@@ -1297,8 +1297,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1318,14 +1318,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email 
protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1333,11 +1333,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1346,19 +1346,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/connect:\n@@ -1368,8 +1368,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1389,14 +1389,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1404,11 +1404,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1417,19 +1417,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/grab:\n@@ -1439,8 +1439,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1460,14 +1460,14 
@@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1475,11 +1475,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1488,19 +1488,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/pause:\n@@ -1510,8 +1510,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1531,14 +1531,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1546,11 +1546,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1559,19 +1559,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ 
version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/push:\n@@ -1581,8 +1581,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1602,14 +1602,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1617,11 +1617,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1630,19 +1630,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/remove:\n@@ -1652,8 +1652,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1673,14 +1673,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1688,11 +1688,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1701,19 +1701,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 
5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/repulse:\n@@ -1723,8 +1723,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1744,14 +1744,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1759,11 +1759,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1772,19 +1772,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/slow:\n@@ -1794,8 +1794,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1815,14 +1815,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1830,11 +1830,11 @@ importers:\n 
specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1843,19 +1843,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/external/trail:\n@@ -1865,8 +1865,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1886,14 +1886,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1901,11 +1901,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1914,19 +1914,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/light:\n@@ -1936,8 +1936,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -1957,14 +1957,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: 
^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -1972,11 +1972,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -1985,19 +1985,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/particles/attract:\n@@ -2007,8 +2007,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2028,14 +2028,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2043,11 +2043,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2056,19 +2056,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/particles/collisions:\n@@ -2078,8 +2078,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 
1.2.0([email protected])\n@@ -2099,14 +2099,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2114,11 +2114,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2127,19 +2127,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/particles/links:\n@@ -2149,8 +2149,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2170,14 +2170,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2185,11 +2185,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2198,19 +2198,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- 
version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n interactions/particles/repulse:\n@@ -2220,8 +2220,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2241,14 +2241,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2256,11 +2256,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2269,19 +2269,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n move/base:\n@@ -2291,8 +2291,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2312,14 +2312,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2327,11 +2327,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2340,19 +2340,19 @@ importers:\n version: 5.0.0\n 
terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n move/parallax:\n@@ -2362,8 +2362,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2383,14 +2383,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2398,11 +2398,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2411,19 +2411,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n paths/curves:\n@@ -2433,8 +2433,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2454,14 +2454,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2469,11 +2469,11 
@@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2482,19 +2482,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n paths/perlinNoise:\n@@ -2504,8 +2504,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2525,14 +2525,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2540,11 +2540,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2553,19 +2553,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n paths/polygon:\n@@ -2575,8 +2575,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2596,14 +2596,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- 
version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2611,11 +2611,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2624,19 +2624,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n paths/simplexNoise:\n@@ -2646,8 +2646,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2667,14 +2667,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2682,11 +2682,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2695,19 +2695,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n paths/svg:\n@@ -2717,8 +2717,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2738,14 +2738,14 @@ 
importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2753,11 +2753,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2766,19 +2766,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/absorbers:\n@@ -2788,8 +2788,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2809,14 +2809,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2824,11 +2824,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2837,19 +2837,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 
5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/canvasMask:\n@@ -2859,8 +2859,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2880,14 +2880,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2895,11 +2895,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2908,19 +2908,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/back:\n@@ -2930,8 +2930,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -2951,14 +2951,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -2966,11 +2966,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -2979,19 +2979,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ 
version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/circ:\n@@ -3001,8 +3001,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3022,14 +3022,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3037,11 +3037,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3050,19 +3050,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/cubic:\n@@ -3072,8 +3072,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3093,14 +3093,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3108,11 +3108,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n 
eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3121,19 +3121,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/expo:\n@@ -3143,8 +3143,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3164,14 +3164,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3179,11 +3179,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3192,19 +3192,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/quad:\n@@ -3214,8 +3214,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3235,14 +3235,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email 
protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3250,11 +3250,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3263,19 +3263,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/quart:\n@@ -3285,8 +3285,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3306,14 +3306,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3321,11 +3321,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3334,19 +3334,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/quint:\n@@ -3356,8 +3356,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3377,14 +3377,14 @@ 
importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3392,11 +3392,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3405,19 +3405,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/easings/sine:\n@@ -3427,8 +3427,8 @@ importers:\n version: link:../../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3448,14 +3448,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3463,11 +3463,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3476,19 +3476,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 
5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/emitters:\n@@ -3498,8 +3498,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3519,14 +3519,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3534,11 +3534,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3547,19 +3547,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/hsvColor:\n@@ -3569,8 +3569,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3590,14 +3590,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3605,11 +3605,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3618,19 +3618,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 
5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/infection:\n@@ -3640,8 +3640,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3661,14 +3661,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3676,11 +3676,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3689,19 +3689,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/motion:\n@@ -3711,8 +3711,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3732,14 +3732,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3747,11 +3747,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- 
version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3760,19 +3760,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/polygonMask:\n@@ -3782,8 +3782,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3803,14 +3803,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3818,11 +3818,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3831,19 +3831,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n plugins/sounds:\n@@ -3853,8 +3853,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3874,14 +3874,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ 
version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3889,11 +3889,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3902,19 +3902,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/bubble:\n@@ -3924,8 +3924,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -3945,14 +3945,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -3960,11 +3960,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -3973,19 +3973,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/cards:\n@@ -3995,8 +3995,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4016,14 +4016,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- 
specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4031,11 +4031,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4044,19 +4044,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/circle:\n@@ -4066,8 +4066,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4087,14 +4087,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4102,11 +4102,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4115,19 +4115,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/cog:\n@@ -4137,8 
+4137,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4158,14 +4158,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4173,11 +4173,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4186,19 +4186,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/heart:\n@@ -4208,8 +4208,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4229,14 +4229,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4244,11 +4244,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4257,19 +4257,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: 
^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/image:\n@@ -4279,8 +4279,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4300,14 +4300,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4315,11 +4315,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4328,19 +4328,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/line:\n@@ -4350,8 +4350,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4371,14 +4371,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4386,11 +4386,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- 
version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4399,19 +4399,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/multiline-text:\n@@ -4421,8 +4421,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4442,14 +4442,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4457,11 +4457,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4470,19 +4470,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/path:\n@@ -4492,8 +4492,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4513,14 +4513,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 
9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4528,11 +4528,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4541,19 +4541,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/polygon:\n@@ -4563,8 +4563,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4584,14 +4584,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4599,11 +4599,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4612,19 +4612,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/rounded-rect:\n@@ -4634,8 +4634,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4655,14 +4655,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email 
protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4670,11 +4670,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4683,19 +4683,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/spiral:\n@@ -4705,8 +4705,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4726,14 +4726,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4741,11 +4741,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4754,19 +4754,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/square:\n@@ -4776,8 +4776,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n 
'@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4797,14 +4797,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4812,11 +4812,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4825,19 +4825,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/star:\n@@ -4847,8 +4847,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4868,14 +4868,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4883,11 +4883,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4896,19 +4896,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 
5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n shapes/text:\n@@ -4918,8 +4918,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -4939,14 +4939,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -4954,11 +4954,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -4967,19 +4967,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/angle:\n@@ -4989,8 +4989,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5010,14 +5010,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5025,11 +5025,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n 
prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5038,19 +5038,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/color:\n@@ -5060,8 +5060,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5081,14 +5081,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5096,11 +5096,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5109,19 +5109,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/destroy:\n@@ -5131,8 +5131,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5152,14 +5152,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email 
protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5167,11 +5167,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5180,19 +5180,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/gradient:\n@@ -5202,8 +5202,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5223,14 +5223,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5238,11 +5238,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5251,19 +5251,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/life:\n@@ -5273,8 +5273,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5294,14 +5294,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 
5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5309,11 +5309,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5322,19 +5322,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/opacity:\n@@ -5344,8 +5344,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5365,14 +5365,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5380,11 +5380,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5393,19 +5393,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/orbit:\n@@ -5415,8 +5415,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ 
specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5436,14 +5436,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5451,11 +5451,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5464,19 +5464,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/outModes:\n@@ -5486,8 +5486,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5507,14 +5507,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5522,11 +5522,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5535,19 +5535,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n 
specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/roll:\n@@ -5557,8 +5557,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5578,14 +5578,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5593,11 +5593,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5606,19 +5606,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/size:\n@@ -5628,8 +5628,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5649,14 +5649,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5664,11 +5664,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5677,19 
+5677,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/strokeColor:\n@@ -5699,8 +5699,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5720,14 +5720,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5735,11 +5735,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5748,19 +5748,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/tilt:\n@@ -5770,8 +5770,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5791,14 +5791,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n 
specifier: ^4.21.5\n version: 4.21.5\n@@ -5806,11 +5806,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5819,19 +5819,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/twinkle:\n@@ -5841,8 +5841,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5862,14 +5862,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5877,11 +5877,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5890,19 +5890,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n updaters/wobble:\n@@ -5912,8 +5912,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/cli':\n specifier: ^1.2.0\n version: 1.2.0([email protected])\n@@ -5933,14 +5933,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email 
protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -5948,11 +5948,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -5961,19 +5961,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n utils/configs:\n@@ -5983,8 +5983,8 @@ importers:\n version: link:../../engine/dist\n devDependencies:\n '@babel/core':\n- specifier: ^7.21.4\n- version: 7.21.4\n+ specifier: ^7.21.8\n+ version: 7.21.8\n '@tsparticles/eslint-config':\n specifier: ^1.11.0\n version: 1.11.0\n@@ -6001,14 +6001,14 @@ importers:\n specifier: ^1.18.0\n version: 1.18.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n babel-loader:\n specifier: ^9.1.2\n- version: 9.1.2(@babel/[email protected])([email protected])\n+ version: 9.1.2(@babel/[email protected])([email protected])\n browserslist:\n specifier: ^4.21.5\n version: 4.21.5\n@@ -6016,11 +6016,11 @@ importers:\n specifier: ^2.4.1\n version: 2.4.1\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n prettier:\n specifier: ^2.8.8\n version: 2.8.8\n@@ -6029,19 +6029,19 @@ importers:\n version: 5.0.0\n terser-webpack-plugin:\n specifier: ^5.3.7\n- version: 5.3.7([email protected])\n+ version: 5.3.7([email protected])\n typescript:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n webpack-bundle-analyzer:\n specifier: ^4.8.0\n version: 4.8.0\n webpack-cli:\n specifier: ^5.0.2\n- version: 5.0.2([email protected])([email protected])\n+ version: 5.0.2([email protected])([email protected])\n publishDirectory: dist\n \n utils/options-updater:\n@@ -6059,17 +6059,17 @@ importers:\n specifier: ^1.13.0\n version: 1.13.0\n '@typescript-eslint/eslint-plugin':\n- specifier: ^5.59.0\n- version: 5.59.0(@typescript-eslint/[email protected])([email protected])([email 
protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n '@typescript-eslint/parser':\n- specifier: ^5.59.0\n- version: 5.59.0([email protected])([email protected])\n+ specifier: ^5.59.2\n+ version: 5.59.2([email protected])([email protected])\n eslint:\n- specifier: ^8.39.0\n- version: 8.39.0\n+ specifier: ^8.40.0\n+ version: 8.40.0\n eslint-config-prettier:\n specifier: ^8.8.0\n- version: 8.8.0([email protected])\n+ version: 8.8.0([email protected])\n fs-extra:\n specifier: ^11.1.1\n version: 11.1.1\n@@ -6293,8 +6293,8 @@ importers:\n specifier: ^5.0.4\n version: 5.0.4\n webpack:\n- specifier: ^5.80.0\n- version: 5.80.0([email protected])\n+ specifier: ^5.82.0\n+ version: 5.82.0([email protected])\n devDependencies:\n '@types/eslint':\n specifier: ^8.37.0\n@@ -6312,14 +6312,14 @@ importers:\n specifier: ^4.14.194\n version: 4.14.194\n '@types/node':\n- specifier: ^18.16.0\n- version: 18.16.0\n+ specifier: ^20.1.0\n+ version: 20.1.0\n '@types/prettier':\n specifier: ^2.7.2\n version: 2.7.2\n '@types/rimraf':\n- specifier: ^3.0.2\n- version: 3.0.2\n+ specifier: ^4.0.5\n+ version: 4.0.5\n \n packages:\n \n@@ -6340,24 +6340,24 @@ packages:\n dependencies:\n '@babel/highlight': 7.18.6\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-KYMqFYTaenzMK4yUtf4EW9wc4N9ef80FsbMtkwool5zpwl4YrT1SdWYSTRcT94KO4hannogdS+LxY7L+arP3gA==}\n engines: {node: '>=6.9.0'}\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-YeM22Sondbo523Sz0+CirSPnbj9bG3P0CdHcBZdqUuaeOaYEFbOLoGU7lebvGP6P5J/WE9wOn7u7C4J9HvS1xQ==}\n engines: {node: '>=6.9.0'}\n dependencies:\n '@ampproject/remapping': 2.2.1\n '@babel/code-frame': 7.21.4\n- '@babel/generator': 7.21.4\n- '@babel/helper-compilation-targets': 7.21.4(@babel/[email protected])\n- '@babel/helper-module-transforms': 7.21.2\n- '@babel/helpers': 7.21.0\n- '@babel/parser': 7.21.4\n+ '@babel/generator': 7.21.5\n+ '@babel/helper-compilation-targets': 7.21.5(@babel/[email protected])\n+ '@babel/helper-module-transforms': 7.21.5\n+ '@babel/helpers': 7.21.5\n+ '@babel/parser': 7.21.8\n '@babel/template': 7.20.7\n- '@babel/traverse': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/traverse': 7.21.5\n+ '@babel/types': 7.21.5\n convert-source-map: 1.9.0\n debug: 4.3.4([email protected])\n gensync: 1.0.0-beta.2\n@@ -6366,11 +6366,11 @@ packages:\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-SrKK/sRv8GesIW1bDagf9cCG38IOMYZusoe1dfg0D8aiUe3Amvoj1QtjTPAWcfrZFvIwlleLb0gxzQidL9w14w==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n '@jridgewell/gen-mapping': 0.3.3\n '@jridgewell/trace-mapping': 0.3.18\n jsesc: 2.5.2\n@@ -6379,64 +6379,65 @@ packages:\n resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n- /@babel/[email protected]:\n- resolution: 
{integrity: sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-uNrjKztPLkUk7bpCNC0jEKDJzzkvel/W+HguzbN8krA+LPfC1CEobJEvAvGka2A/M+ViOqXdcRL0GqPUJSjx9g==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/helper-explode-assignable-expression': 7.18.6\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-Fa0tTuOXZ1iL8IeDFUWCzjZcn+sJGd9RZdH9esYVjEejGmzf+FFYQpMi/kZUk2kPy/q1H3/GPw7np8qar/stfg==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-1RkbFGUKex4lvsB9yhIfWltJM5cZKUftB2eNajaDv3dCMEp49iBG0K14uH8NnX9IPux2+mK7JGEOB0jn48/J6w==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0\n dependencies:\n- '@babel/compat-data': 7.21.4\n- '@babel/core': 7.21.4\n+ '@babel/compat-data': 7.21.7\n+ '@babel/core': 7.21.8\n '@babel/helper-validator-option': 7.21.0\n browserslist: 4.21.5\n lru-cache: 5.1.1\n semver: 6.3.0\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-46QrX2CQlaFRF4TkwfTt6nJD7IHq8539cCL7SDpqWSDeJKY1xylKKY5F/33mJhLZ3mFvKv2gGrVS6NkyF6qs+Q==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-+THiN8MqiH2AczyuZrnrKL6cAxFRRQDKW9h1YkBvbgKmAm6mwiacig1qT73DHIWMGo40GRnsEfN3LA+E6NtmSw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n '@babel/helper-annotate-as-pure': 7.18.6\n- '@babel/helper-environment-visitor': 7.18.9\n+ '@babel/helper-environment-visitor': 7.21.5\n '@babel/helper-function-name': 7.21.0\n- '@babel/helper-member-expression-to-functions': 7.21.0\n+ '@babel/helper-member-expression-to-functions': 7.21.5\n '@babel/helper-optimise-call-expression': 7.18.6\n- '@babel/helper-replace-supers': 7.20.7\n+ '@babel/helper-replace-supers': 7.21.5\n '@babel/helper-skip-transparent-expression-wrappers': 7.20.0\n '@babel/helper-split-export-declaration': 7.18.6\n+ semver: 6.3.0\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-M00OuhU+0GyZ5iBBN9czjugzWrEq2vDpf/zCYHxxf93ul/Q5rv+a5h+/+0WnI1AebHNVtl5bFV0qsJoH23DbfA==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-zGuSdedkFtsFHGbexAvNuipg1hbtitDLo2XE8/uf6Y9sOQV1xsYX/2pNbtedp/X0eU1pIt+kGvaqHCowkRbS5g==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n '@babel/helper-annotate-as-pure': 7.18.6\n regexpu-core: 5.3.2\n+ semver: 6.3.0\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww==}\n peerDependencies:\n '@babel/core': ^7.4.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-compilation-targets': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-compilation-targets': 7.21.5(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n debug: 4.3.4([email protected])\n lodash.debounce: 4.0.8\n resolve: 1.22.2\n@@ -6444,53 +6445,47 @@ packages:\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected]:\n- 
resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==}\n- engines: {node: '>=6.9.0'}\n-\n- /@babel/[email protected]:\n- resolution: {integrity: sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-IYl4gZ3ETsWocUWgsFZLM5i1BYx9SoemminVEXadgLBa9TdeorzgLKm8wWLA6J1N/kT3Kch8XIk1laNzYoHKvQ==}\n engines: {node: '>=6.9.0'}\n- dependencies:\n- '@babel/types': 7.21.4\n \n /@babel/[email protected]:\n resolution: {integrity: sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==}\n engines: {node: '>=6.9.0'}\n dependencies:\n '@babel/template': 7.20.7\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n /@babel/[email protected]:\n resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-Muu8cdZwNN6mRRNG6lAYErJ5X3bRevgYR2O8wN0yn7jJSnGDu6eG59RfT29JHxGUovyfrh6Pj0XzmR7drNVL3Q==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-nIcGfgwpH2u4n9GG1HpStW5Ogx7x7ekiFHbjjFRKXbn5zUvqO9ZgotCO4x1aNbKn/x/xOUaXEhyNHCwtFCpxWg==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n /@babel/[email protected]:\n resolution: {integrity: sha512-orajc5T2PsRYUN3ZryCEFeMDYwyw09c/pZeaQEZPH0MpKzSvn3e0uXsDBu3k03VI+9DBiRo+l22BfKTpKwa/Wg==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-bI2Z9zBGY2q5yMHoBvJ2a9iX3ZOAzJPm7Q8Yz6YeoUjU/Cvhmi2G4QyTNyPBqqXSgTjUxRg3L0xV45HvkNWWBw==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/helper-environment-visitor': 7.18.9\n+ '@babel/helper-environment-visitor': 7.21.5\n '@babel/helper-module-imports': 7.21.4\n- '@babel/helper-simple-access': 7.20.2\n+ '@babel/helper-simple-access': 7.21.5\n '@babel/helper-split-export-declaration': 7.18.6\n '@babel/helper-validator-identifier': 7.19.1\n '@babel/template': 7.20.7\n- '@babel/traverse': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/traverse': 7.21.5\n+ '@babel/types': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n@@ -6498,59 +6493,59 @@ packages:\n resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-0WDaIlXKOX/3KfBK/dwP1oQGiPh6rjMkT7HIRv7i5RR2VUMwrx5ZL0dwBkKx7+SW1zwNdgjHd34IMk5ZjTeHVg==}\n engines: {node: '>=6.9.0'}\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 
7.21.8\n '@babel/helper-annotate-as-pure': 7.18.6\n- '@babel/helper-environment-visitor': 7.18.9\n+ '@babel/helper-environment-visitor': 7.21.5\n '@babel/helper-wrap-function': 7.20.5\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-/y7vBgsr9Idu4M6MprbOVUfH3vs7tsIfnVWv/Ml2xgwvyH6LTngdfbf5AdsKwkJy4zgy1X/kuNrEKvhhK28Yrg==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/helper-environment-visitor': 7.18.9\n- '@babel/helper-member-expression-to-functions': 7.21.0\n+ '@babel/helper-environment-visitor': 7.21.5\n+ '@babel/helper-member-expression-to-functions': 7.21.5\n '@babel/helper-optimise-call-expression': 7.18.6\n '@babel/template': 7.20.7\n- '@babel/traverse': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/traverse': 7.21.5\n+ '@babel/types': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-ENPDAMC1wAjR0uaCUwliBdiSl1KBJAVnMTzXqi64c2MG8MPR6ii4qf7bSXDqSFbr4W6W028/rf5ivoHop5/mkg==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n /@babel/[email protected]:\n resolution: {integrity: sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n /@babel/[email protected]:\n resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-5pTUx3hAJaZIdW99sJ6ZUUgWq/Y+Hja7TowEnLNMm1VivRgZQL3vpBY3qUACVsvw+yQU6+YgfBVmcbLaZtrA1w==}\n engines: {node: '>=6.9.0'}\n \n /@babel/[email protected]:\n@@ -6567,18 +6562,18 @@ packages:\n dependencies:\n '@babel/helper-function-name': 7.21.0\n '@babel/template': 7.20.7\n- '@babel/traverse': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/traverse': 7.21.5\n+ '@babel/types': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-BSY+JSlHxOmGsPTydUkPf1MdMQ3M81x5xGCOVgWM3G8XH77sJ292Y2oqcp0CbbgxhqBuI46iUz1tT7hqP7EfgA==}\n engines: {node: '>=6.9.0'}\n dependencies:\n '@babel/template': 7.20.7\n- '@babel/traverse': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/traverse': 7.21.5\n+ '@babel/types': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n@@ -6590,757 +6585,766 @@ packages:\n chalk: 2.4.2\n js-tokens: 4.0.0\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: 
sha512-6zavDGdzG3gUqAdWvlLFfk+36RilI+Pwyuuh7HItyeScCWP3k6i8vKclAQ0bM/0y/Kz/xiwvxhMv9MgTJP5gmA==}\n engines: {node: '>=6.0.0'}\n hasBin: true\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n \n- /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.18.6(@babel/[email protected]):\n+ /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.18.6(@babel/[email protected]):\n resolution: {integrity: sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-sbr9+wNE5aXMBBFBICk01tt7sBf2Oc9ikRFEcem/ZORup9IMUdNhW7/wVLEbbtlWOsEubJet46mHAL2C8+2jKQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.13.0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n '@babel/helper-skip-transparent-expression-wrappers': 7.20.0\n- '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/[email protected])\n+ '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-environment-visitor': 7.18.9\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/helper-remap-async-to-generator': 7.18.9(@babel/[email protected])\n- '@babel/plugin-syntax-async-generators': 7.8.4(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-environment-visitor': 7.21.5\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/helper-remap-async-to-generator': 7.18.9(@babel/[email protected])\n+ '@babel/plugin-syntax-async-generators': 7.8.4(@babel/[email protected])\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-create-class-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-create-class-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-XP5G9MWNUskFuP30IfFSEFB0Z6HzLIUcjYM4bYOPHXl7eiJ9HFv8tWj6TXTN5QODiEhDZAeI4hLok2iHFFV4hw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.12.0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-create-class-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n- 
'@babel/plugin-syntax-class-static-block': 7.14.5(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-create-class-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/[email protected])\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-json-strings': 7.8.3(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-json-strings': 7.8.3(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email 
protected]):\n resolution: {integrity: sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/compat-data': 7.21.4\n- '@babel/core': 7.21.4\n- '@babel/helper-compilation-targets': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/[email protected])\n- '@babel/plugin-transform-parameters': 7.21.3(@babel/[email protected])\n+ '@babel/compat-data': 7.21.7\n+ '@babel/core': 7.21.8\n+ '@babel/helper-compilation-targets': 7.21.5(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-transform-parameters': 7.21.3(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n '@babel/helper-skip-transparent-expression-wrappers': 7.20.0\n- '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/[email protected])\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-create-class-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-create-class-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email 
protected](@babel/[email protected]):\n resolution: {integrity: sha512-ha4zfehbJjc5MmXBlHec1igel5TJXXLDDRbuJ4+XT2TJcyD9/V1919BA8gMvsdHcNMBy4WBUBiRb3nw/EQUtBw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n '@babel/helper-annotate-as-pure': 7.18.6\n- '@babel/helper-create-class-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/[email protected])\n+ '@babel/helper-create-class-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/[email protected])\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==}\n engines: {node: '>=4'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-create-regexp-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ 
'@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==}\n+ peerDependencies:\n+ '@babel/core': ^7.0.0-0\n+ dependencies:\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: 
sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-wb1mhwGOCaXHDTcsRYMKF9e5bbMgqwxtqa2Y1ifH96dXJPwbuLX9qHy3clhrxVqgMz7nyNXs8VkxdH8UBcjKqA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-Uo5gwHPT9vgnSXQxqGtpdufUiWp96gk7yiP4Mp5bm1QMkEmLXBO7PAGYbKoJ6DhAwiNkcHFBol/x5zZZkL/t0Q==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n '@babel/helper-module-imports': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/helper-remap-async-to-generator': 7.18.9(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/helper-remap-async-to-generator': 7.18.9(@babel/[email protected])\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-Mdrbunoh9SxwFZapeHVrwFmri16+oYotcZysSzhNIVDwIAb1UV+kvnxULSYq9J3/q5MDG+4X6w8QVgD1zhBXNQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: 
sha512-RZhbYTCEUAe6ntPehC4hlslPWosNHDox+vAs4On/mCLRLfoDVHf6hVEd7kuxr1RnHwJmxFfUM3cZiZRmPxJPXQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n '@babel/helper-annotate-as-pure': 7.18.6\n- '@babel/helper-compilation-targets': 7.21.4(@babel/[email protected])\n- '@babel/helper-environment-visitor': 7.18.9\n+ '@babel/helper-compilation-targets': 7.21.5(@babel/[email protected])\n+ '@babel/helper-environment-visitor': 7.21.5\n '@babel/helper-function-name': 7.21.0\n '@babel/helper-optimise-call-expression': 7.18.6\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/helper-replace-supers': 7.20.7\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/helper-replace-supers': 7.21.5\n '@babel/helper-split-export-declaration': 7.18.6\n globals: 11.12.0\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-TR653Ki3pAwxBxUe8srfF3e4Pe3FTA46uaNHYyQwIoM4oWKSoOZiDNyHJ0oIoDIUPSRQbQG7jzgVBX3FPVne1Q==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n '@babel/template': 7.20.7\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-bp6hwMFzuiE4HqYEyoGJ/V2LeIWn+hLVKc4pnj++E5XQptwhtcGmSayM029d/j2X1bPKGTlsyPwAubuU22KhMA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-create-regexp-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-builder-binary-assignment-operator-visitor': 7.18.9\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ 
'@babel/helper-builder-binary-assignment-operator-visitor': 7.21.5\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-LlUYlydgDkKpIY7mcBWvyPPmMcOphEyYA27Ef4xpbh1IiDNLr0kZsos2nf92vz3IccvJI25QUwp86Eo5s6HmBQ==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-nYWpjKW/7j/I/mZkGVgHJXh4bA1sfdFnJoOXwJuj4m3Q2EraO/8ZyrkCau9P5tbHQk01RMSt6KYLCsW7730SXQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-compilation-targets': 7.21.4(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-compilation-targets': 7.21.5(@babel/[email protected])\n '@babel/helper-function-name': 7.21.0\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-NuzCt5IIYOW0O30UvqktzHYR2ud5bOWbY0yaxWZ6G+aFzOMJvrs5YHNikrbdaT15+KNO31nPOy5Fim3ku6Zb5g==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-module-transforms': 7.21.2\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-module-transforms': 7.21.5\n+ '@babel/helper-plugin-utils': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-Cln+Yy04Gxua7iPdj6nOV96smLGjpElir5YwzF0LBPKoPlLDNJePNlrGGaybAJkd0zKRnOVXOgizSqPYMNYkzA==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-OVryBEgKUbtqMoB7eG2rs6UFexJi6Zj6FDXx+esBLPTCxCNxAY9o+8Di7IsUGJ+AVhp5ncK0fxWUBd0/1gPhrQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-module-transforms': 7.21.2\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/helper-simple-access': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-module-transforms': 7.21.5\n+ 
'@babel/helper-plugin-utils': 7.21.5\n+ '@babel/helper-simple-access': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-vVu5g9BPQKSFEmvt2TA4Da5N+QVS66EX21d8uoOihC+OCpUoGvzVsXeqFdtAEfVa5BILAeFt+U7yVmLbQnAJmw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n '@babel/helper-hoist-variables': 7.18.6\n- '@babel/helper-module-transforms': 7.21.2\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/helper-module-transforms': 7.21.5\n+ '@babel/helper-plugin-utils': 7.21.5\n '@babel/helper-validator-identifier': 7.19.1\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-module-transforms': 7.21.2\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-module-transforms': 7.21.5\n+ '@babel/helper-plugin-utils': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-mOW4tTzi5iTLnw+78iEq3gr8Aoq4WNRGpmSlrogqaiCBoR1HFhpU4JkpQFOHfeYx3ReVIFWOQJS4aZBRvuZ6mA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-create-regexp-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/helper-replace-supers': 7.20.7\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/helper-replace-supers': 7.21.5\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-Wxc+TvppQG9xWFYatvCGPvZ6+SIUxQ2ZdiBP+PHYMIjnPXD+uThCshaz4NZOnODAtBjjcVQQ/3OKs9LW28purQ==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- 
/@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-kW/oO7HPBtntbsahzQ0qSE3tFvkFwnbozz3NWFhLGqH75vLEg+sCGngLlhVkePlCs3Jv0dBBHDzCHxNiFAQKCQ==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-ZoYBKDb6LyMi5yCsByQ5jmXsHAQDDYeexT1Szvlmui+lADvfSecr5Dxd/PkrTC3pAD182Fcju1VQkB4oCp9M+w==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n regenerator-transform: 0.15.1\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n '@babel/helper-skip-transparent-expression-wrappers': 7.20.0\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n 
resolution: {integrity: sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-LYm/gTOwZqsYohlvFUe/8Tujz75LqqVC2w+2qPHLR+WyWHGCZPN1KBpJCJn+4Bk4gOkQy/IXKIge6az5MqwlOg==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-create-regexp-features-plugin': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/core': 7.21.8\n+ '@babel/helper-create-regexp-features-plugin': 7.21.8(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n \n- /@babel/[email protected](@babel/[email protected]):\n- resolution: {integrity: sha512-2W57zHs2yDLm6GD5ZpvNn71lZ0B/iypSdIeq25OurDKji6AdzV07qp4s3n1/x5BqtiGaTrPN3nerlSCaC5qNTw==}\n+ /@babel/[email protected](@babel/[email protected]):\n+ resolution: {integrity: sha512-wH00QnTTldTbf/IefEVyChtRdw5RJvODT/Vb4Vcxq1AZvtXj6T0YeX0cAcXhI6/BdGuiP3GcNIL4OQbI2DVNxg==}\n engines: {node: '>=6.9.0'}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/compat-data': 7.21.4\n- '@babel/core': 7.21.4\n- '@babel/helper-compilation-targets': 7.21.4(@babel/[email protected])\n- '@babel/helper-plugin-utils': 7.20.2\n+ '@babel/compat-data': 7.21.7\n+ '@babel/core': 7.21.8\n+ '@babel/helper-compilation-targets': 7.21.5(@babel/[email protected])\n+ '@babel/helper-plugin-utils': 7.21.5\n '@babel/helper-validator-option': 7.21.0\n- '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.18.6(@babel/[email protected])\n- '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.20.7(@babel/[email protected])\n- '@babel/plugin-proposal-async-generator-functions': 7.20.7(@babel/[email protected])\n- '@babel/plugin-proposal-class-properties': 7.18.6(@babel/[email protected])\n- '@babel/plugin-proposal-class-static-block': 7.21.0(@babel/[email protected])\n- '@babel/plugin-proposal-dynamic-import': 7.18.6(@babel/[email protected])\n- '@babel/plugin-proposal-export-namespace-from': 7.18.9(@babel/[email protected])\n- '@babel/plugin-proposal-json-strings': 7.18.6(@babel/[email protected])\n- '@babel/plugin-proposal-logical-assignment-operators': 7.20.7(@babel/[email protected])\n- '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/[email protected])\n- '@babel/plugin-proposal-numeric-separator': 7.18.6(@babel/[email protected])\n- '@babel/plugin-proposal-object-rest-spread': 7.20.7(@babel/[email protected])\n- '@babel/plugin-proposal-optional-catch-binding': 7.18.6(@babel/[email protected])\n- 
'@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/[email protected])\n- '@babel/plugin-proposal-private-methods': 7.18.6(@babel/[email protected])\n- '@babel/plugin-proposal-private-property-in-object': 7.21.0(@babel/[email protected])\n- '@babel/plugin-proposal-unicode-property-regex': 7.18.6(@babel/[email protected])\n- '@babel/plugin-syntax-async-generators': 7.8.4(@babel/[email protected])\n- '@babel/plugin-syntax-class-properties': 7.12.13(@babel/[email protected])\n- '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/[email protected])\n- '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/[email protected])\n- '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/[email protected])\n- '@babel/plugin-syntax-import-assertions': 7.20.0(@babel/[email protected])\n- '@babel/plugin-syntax-json-strings': 7.8.3(@babel/[email protected])\n- '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/[email protected])\n- '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/[email protected])\n- '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/[email protected])\n- '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/[email protected])\n- '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/[email protected])\n- '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/[email protected])\n- '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/[email protected])\n- '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/[email protected])\n- '@babel/plugin-transform-arrow-functions': 7.20.7(@babel/[email protected])\n- '@babel/plugin-transform-async-to-generator': 7.20.7(@babel/[email protected])\n- '@babel/plugin-transform-block-scoped-functions': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-block-scoping': 7.21.0(@babel/[email protected])\n- '@babel/plugin-transform-classes': 7.21.0(@babel/[email protected])\n- '@babel/plugin-transform-computed-properties': 7.20.7(@babel/[email protected])\n- '@babel/plugin-transform-destructuring': 7.21.3(@babel/[email protected])\n- '@babel/plugin-transform-dotall-regex': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-duplicate-keys': 7.18.9(@babel/[email protected])\n- '@babel/plugin-transform-exponentiation-operator': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-for-of': 7.21.0(@babel/[email protected])\n- '@babel/plugin-transform-function-name': 7.18.9(@babel/[email protected])\n- '@babel/plugin-transform-literals': 7.18.9(@babel/[email protected])\n- '@babel/plugin-transform-member-expression-literals': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-modules-amd': 7.20.11(@babel/[email protected])\n- '@babel/plugin-transform-modules-commonjs': 7.21.2(@babel/[email protected])\n- '@babel/plugin-transform-modules-systemjs': 7.20.11(@babel/[email protected])\n- '@babel/plugin-transform-modules-umd': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-named-capturing-groups-regex': 7.20.5(@babel/[email protected])\n- '@babel/plugin-transform-new-target': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-object-super': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-parameters': 7.21.3(@babel/[email protected])\n- '@babel/plugin-transform-property-literals': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-regenerator': 7.20.5(@babel/[email protected])\n- '@babel/plugin-transform-reserved-words': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-shorthand-properties': 
7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-spread': 7.20.7(@babel/[email protected])\n- '@babel/plugin-transform-sticky-regex': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-template-literals': 7.18.9(@babel/[email protected])\n- '@babel/plugin-transform-typeof-symbol': 7.18.9(@babel/[email protected])\n- '@babel/plugin-transform-unicode-escapes': 7.18.10(@babel/[email protected])\n- '@babel/plugin-transform-unicode-regex': 7.18.6(@babel/[email protected])\n- '@babel/preset-modules': 0.1.5(@babel/[email protected])\n- '@babel/types': 7.21.4\n- babel-plugin-polyfill-corejs2: 0.3.3(@babel/[email protected])\n- babel-plugin-polyfill-corejs3: 0.6.0(@babel/[email protected])\n- babel-plugin-polyfill-regenerator: 0.4.1(@babel/[email protected])\n+ '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.20.7(@babel/[email protected])\n+ '@babel/plugin-proposal-async-generator-functions': 7.20.7(@babel/[email protected])\n+ '@babel/plugin-proposal-class-properties': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-proposal-class-static-block': 7.21.0(@babel/[email protected])\n+ '@babel/plugin-proposal-dynamic-import': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-proposal-export-namespace-from': 7.18.9(@babel/[email protected])\n+ '@babel/plugin-proposal-json-strings': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-proposal-logical-assignment-operators': 7.20.7(@babel/[email protected])\n+ '@babel/plugin-proposal-nullish-coalescing-operator': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-proposal-numeric-separator': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-proposal-object-rest-spread': 7.20.7(@babel/[email protected])\n+ '@babel/plugin-proposal-optional-catch-binding': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-proposal-optional-chaining': 7.21.0(@babel/[email protected])\n+ '@babel/plugin-proposal-private-methods': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-proposal-private-property-in-object': 7.21.0(@babel/[email protected])\n+ '@babel/plugin-proposal-unicode-property-regex': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-syntax-async-generators': 7.8.4(@babel/[email protected])\n+ '@babel/plugin-syntax-class-properties': 7.12.13(@babel/[email protected])\n+ '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/[email protected])\n+ '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-export-namespace-from': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-import-assertions': 7.20.0(@babel/[email protected])\n+ '@babel/plugin-syntax-import-meta': 7.10.4(@babel/[email protected])\n+ '@babel/plugin-syntax-json-strings': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/[email protected])\n+ '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/[email protected])\n+ '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/[email protected])\n+ '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/[email protected])\n+ '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/[email protected])\n+ '@babel/plugin-transform-arrow-functions': 
7.21.5(@babel/[email protected])\n+ '@babel/plugin-transform-async-to-generator': 7.20.7(@babel/[email protected])\n+ '@babel/plugin-transform-block-scoped-functions': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-block-scoping': 7.21.0(@babel/[email protected])\n+ '@babel/plugin-transform-classes': 7.21.0(@babel/[email protected])\n+ '@babel/plugin-transform-computed-properties': 7.21.5(@babel/[email protected])\n+ '@babel/plugin-transform-destructuring': 7.21.3(@babel/[email protected])\n+ '@babel/plugin-transform-dotall-regex': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-duplicate-keys': 7.18.9(@babel/[email protected])\n+ '@babel/plugin-transform-exponentiation-operator': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-for-of': 7.21.5(@babel/[email protected])\n+ '@babel/plugin-transform-function-name': 7.18.9(@babel/[email protected])\n+ '@babel/plugin-transform-literals': 7.18.9(@babel/[email protected])\n+ '@babel/plugin-transform-member-expression-literals': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-modules-amd': 7.20.11(@babel/[email protected])\n+ '@babel/plugin-transform-modules-commonjs': 7.21.5(@babel/[email protected])\n+ '@babel/plugin-transform-modules-systemjs': 7.20.11(@babel/[email protected])\n+ '@babel/plugin-transform-modules-umd': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-named-capturing-groups-regex': 7.20.5(@babel/[email protected])\n+ '@babel/plugin-transform-new-target': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-object-super': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-parameters': 7.21.3(@babel/[email protected])\n+ '@babel/plugin-transform-property-literals': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-regenerator': 7.21.5(@babel/[email protected])\n+ '@babel/plugin-transform-reserved-words': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-shorthand-properties': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-spread': 7.20.7(@babel/[email protected])\n+ '@babel/plugin-transform-sticky-regex': 7.18.6(@babel/[email protected])\n+ '@babel/plugin-transform-template-literals': 7.18.9(@babel/[email protected])\n+ '@babel/plugin-transform-typeof-symbol': 7.18.9(@babel/[email protected])\n+ '@babel/plugin-transform-unicode-escapes': 7.21.5(@babel/[email protected])\n+ '@babel/plugin-transform-unicode-regex': 7.18.6(@babel/[email protected])\n+ '@babel/preset-modules': 0.1.5(@babel/[email protected])\n+ '@babel/types': 7.21.5\n+ babel-plugin-polyfill-corejs2: 0.3.3(@babel/[email protected])\n+ babel-plugin-polyfill-corejs3: 0.6.0(@babel/[email protected])\n+ babel-plugin-polyfill-regenerator: 0.4.1(@babel/[email protected])\n core-js-compat: 3.30.1\n semver: 6.3.0\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected](@babel/[email protected]):\n+ /@babel/[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-plugin-utils': 7.20.2\n- '@babel/plugin-proposal-unicode-property-regex': 7.18.6(@babel/[email protected])\n- '@babel/plugin-transform-dotall-regex': 7.18.6(@babel/[email protected])\n- '@babel/types': 7.21.4\n+ '@babel/core': 7.21.8\n+ '@babel/helper-plugin-utils': 7.21.5\n+ '@babel/plugin-proposal-unicode-property-regex': 7.18.6(@babel/[email protected])\n+ 
'@babel/plugin-transform-dotall-regex': 7.18.6(@babel/[email protected])\n+ '@babel/types': 7.21.5\n esutils: 2.0.3\n \n /@babel/[email protected]:\n resolution: {integrity: sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==}\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-8jI69toZqqcsnqGGqwGS4Qb1VwLOEp4hz+CXPywcvjs60u3B4Pom/U/7rm4W8tMOYEB+E9wgD0mW1l3r8qlI9Q==}\n engines: {node: '>=6.9.0'}\n dependencies:\n regenerator-runtime: 0.13.11\n@@ -7350,41 +7354,41 @@ packages:\n engines: {node: '>=6.9.0'}\n dependencies:\n '@babel/code-frame': 7.21.4\n- '@babel/parser': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/parser': 7.21.8\n+ '@babel/types': 7.21.5\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-AhQoI3YjWi6u/y/ntv7k48mcrCXmus0t79J9qPNlk/lAsFlCiJ047RmbfMOawySTHtywXhbXgpx/8nXMYd+oFw==}\n engines: {node: '>=6.9.0'}\n dependencies:\n '@babel/code-frame': 7.21.4\n- '@babel/generator': 7.21.4\n- '@babel/helper-environment-visitor': 7.18.9\n+ '@babel/generator': 7.21.5\n+ '@babel/helper-environment-visitor': 7.21.5\n '@babel/helper-function-name': 7.21.0\n '@babel/helper-hoist-variables': 7.18.6\n '@babel/helper-split-export-declaration': 7.18.6\n- '@babel/parser': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/parser': 7.21.8\n+ '@babel/types': 7.21.5\n debug: 4.3.4([email protected])\n globals: 11.12.0\n transitivePeerDependencies:\n - supports-color\n \n- /@babel/[email protected]:\n- resolution: {integrity: sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==}\n+ /@babel/[email protected]:\n+ resolution: {integrity: sha512-m4AfNvVF2mVC/F7fDEdH2El3HzUg9It/XsCxZiOTTA3m3qYfcSVSbTfM6Q9xG+hYDniZssYhlXKKUMD5m8tF4Q==}\n engines: {node: '>=6.9.0'}\n dependencies:\n- '@babel/helper-string-parser': 7.19.4\n+ '@babel/helper-string-parser': 7.21.5\n '@babel/helper-validator-identifier': 7.19.1\n to-fast-properties: 2.0.0\n \n- /@commitlint/[email protected]:\n- resolution: {integrity: sha512-kCnDD9LE2ySiTnj/VPaxy4/oRayRcdv4aCuVxtoum8SxIU7OADHc0nJPQfheE8bHcs3zZdWzDMWltRosuT13bg==}\n+ /@commitlint/[email protected]:\n+ resolution: {integrity: sha512-ItSz2fd4F+CujgIbQOfNNerDF1eFlsBGEfp9QcCb1kxTYMuKTYZzA6Nu1YRRrIaaWwe2E7awUGpIMrPoZkOG3A==}\n engines: {node: '>=v14'}\n hasBin: true\n dependencies:\n '@commitlint/format': 17.4.4\n- '@commitlint/lint': 17.6.1\n+ '@commitlint/lint': 17.6.3\n '@commitlint/load': 17.5.0\n '@commitlint/read': 17.5.1\n '@commitlint/types': 17.4.4\n@@ -7392,14 +7396,14 @@ packages:\n lodash.isfunction: 3.0.9\n resolve-from: 5.0.0\n resolve-global: 1.0.0\n- yargs: 17.7.1\n+ yargs: 17.7.2\n transitivePeerDependencies:\n - '@swc/core'\n - '@swc/wasm'\n dev: true\n \n- /@commitlint/[email protected]:\n- resolution: {integrity: sha512-ng/ybaSLuTCH9F+7uavSOnEQ9EFMl7lHEjfAEgRh1hwmEe8SpLKpQeMo2aT1IWvHaGMuTb+gjfbzoRf2IR23NQ==}\n+ /@commitlint/[email protected]:\n+ resolution: {integrity: sha512-bLyHEjjRWqlLQWIgYFHmUPbEFMOOLXeF3QbUinDIJev/u9e769tkoTH9YPknEywiuIrAgZaVo+OfzAIsJP0fsw==}\n engines: {node: '>=v14'}\n dependencies:\n conventional-changelog-conventionalcommits: 5.0.0\n@@ -7438,19 +7442,19 @@ packages:\n chalk: 4.1.2\n dev: true\n \n- /@commitlint/[email 
protected]:\n- resolution: {integrity: sha512-Y3eo1SFJ2JQDik4rWkBC4tlRIxlXEFrRWxcyrzb1PUT2k3kZ/XGNuCDfk/u0bU2/yS0tOA/mTjFsV+C4qyACHw==}\n+ /@commitlint/[email protected]:\n+ resolution: {integrity: sha512-LQbNdnPbxrpbcrVKR5yf51SvquqktpyZJwqXx3lUMF6+nT9PHB8xn3wLy8pi2EQv5Zwba484JnUwDE1ygVYNQA==}\n engines: {node: '>=v14'}\n dependencies:\n '@commitlint/types': 17.4.4\n- semver: 7.3.8\n+ semver: 7.5.0\n dev: true\n \n- /@commitlint/[email protected]:\n- resolution: {integrity: sha512-VARJ9kxH64isgwVnC+ABPafCYzqxpsWJIpDaTuI0gh8aX4GQ0i7cn9tvxtFNfJj4ER2BAJeWJ0vURdNYjK2RQQ==}\n+ /@commitlint/[email protected]:\n+ resolution: {integrity: sha512-fBlXwt6SHJFgm3Tz+luuo3DkydAx9HNC5y4eBqcKuDuMVqHd2ugMNr+bQtx6riv9mXFiPoKp7nE4Xn/ls3iVDA==}\n engines: {node: '>=v14'}\n dependencies:\n- '@commitlint/is-ignored': 17.4.4\n+ '@commitlint/is-ignored': 17.6.3\n '@commitlint/parse': 17.4.4\n '@commitlint/rules': 17.6.1\n '@commitlint/types': 17.4.4\n@@ -7464,15 +7468,15 @@ packages:\n '@commitlint/execute-rule': 17.4.0\n '@commitlint/resolve-extends': 17.4.4\n '@commitlint/types': 17.4.4\n- '@types/node': 18.16.3\n+ '@types/node': 20.1.0\n chalk: 4.1.2\n cosmiconfig: 8.1.3\n- cosmiconfig-typescript-loader: 4.3.0(@types/[email protected])([email protected])([email protected])([email protected])\n+ cosmiconfig-typescript-loader: 4.3.0(@types/[email protected])([email protected])([email protected])([email protected])\n lodash.isplainobject: 4.0.6\n lodash.merge: 4.6.2\n lodash.uniq: 4.5.0\n resolve-from: 5.0.0\n- ts-node: 10.9.1(@types/[email protected])([email protected])\n+ ts-node: 10.9.1(@types/[email protected])([email protected])\n typescript: 5.0.4\n transitivePeerDependencies:\n - '@swc/core'\n@@ -7574,34 +7578,34 @@ packages:\n - supports-color\n dev: true\n \n- /@es-joy/[email protected]:\n- resolution: {integrity: sha512-5vxWJ1gEkEF0yRd0O+uK6dHJf7adrxwQSX8PuRiPfFSAbNLnY0ZJfXaZucoz14Jj2N11xn2DnlEPwWRpYpvRjg==}\n- engines: {node: ^14 || ^16 || ^17 || ^18 || ^19 || ^20}\n+ /@es-joy/[email protected]:\n+ resolution: {integrity: sha512-TFac4Bnv0ZYNkEeDnOWHQhaS1elWlvOCQxH06iHeu5iffs+hCaLVIZJwF+FqksQi68R4i66Pu+4DfFGvble+Uw==}\n+ engines: {node: '>=16'}\n dependencies:\n comment-parser: 1.3.1\n esquery: 1.5.0\n jsdoc-type-pratt-parser: 4.0.0\n \n- /@eslint-community/[email protected]([email protected]):\n+ /@eslint-community/[email protected]([email protected]):\n resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n peerDependencies:\n eslint: ^6.0.0 || ^7.0.0 || >=8.0.0\n dependencies:\n- eslint: 8.39.0\n- eslint-visitor-keys: 3.4.0\n+ eslint: 8.40.0\n+ eslint-visitor-keys: 3.4.1\n \n- /@eslint-community/[email protected]:\n- resolution: {integrity: sha512-vITaYzIcNmjn5tF5uxcZ/ft7/RXGrMUIS9HalWckEOF6ESiwXKoMzAQf2UW0aVd6rnOeExTJVd5hmWXucBKGXQ==}\n+ /@eslint-community/[email protected]:\n+ resolution: {integrity: sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==}\n engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0}\n \n- /@eslint/[email protected]:\n- resolution: {integrity: sha512-3W4f5tDUra+pA+FzgugqL2pRimUTDJWKr7BINqOpkZrC0uYI0NIc0/JFgBROCU07HR6GieA5m3/rsPIhDmCXTQ==}\n+ /@eslint/[email protected]:\n+ resolution: {integrity: sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n dependencies:\n ajv: 6.12.6\n debug: 4.3.4([email protected])\n- espree: 
9.5.1\n+ espree: 9.5.2\n globals: 13.20.0\n ignore: 5.2.4\n import-fresh: 3.3.0\n@@ -7611,8 +7615,8 @@ packages:\n transitivePeerDependencies:\n - supports-color\n \n- /@eslint/[email protected]:\n- resolution: {integrity: sha512-kf9RB0Fg7NZfap83B3QOqOGg9QmD9yBudqQXzzOtn3i4y7ZUXe5ONeW34Gwi+TxhH4mvj72R1Zc300KUMa9Bng==}\n+ /@eslint/[email protected]:\n+ resolution: {integrity: sha512-ElyB54bJIhXQYVKjDSvCkPO1iU1tSAeVQJbllWJq1XQSmmA4dgFk8CbiBGpiOPxleE48vDogxCtmMYku4HSVLA==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n \n /@fortawesome/[email protected]:\n@@ -7647,6 +7651,17 @@ packages:\n engines: {node: '>=6.9.0'}\n dev: true\n \n+ /@isaacs/[email protected]:\n+ resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==}\n+ engines: {node: '>=12'}\n+ dependencies:\n+ string-width: 5.1.2\n+ string-width-cjs: /[email protected]\n+ strip-ansi: 7.0.1\n+ strip-ansi-cjs: /[email protected]\n+ wrap-ansi: 8.1.0\n+ wrap-ansi-cjs: /[email protected]\n+\n /@isaacs/[email protected]:\n resolution: {integrity: sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ==}\n dev: true\n@@ -7720,26 +7735,26 @@ packages:\n '@jridgewell/sourcemap-codec': 1.4.15\n dev: true\n \n- /@lerna/[email protected]:\n- resolution: {integrity: sha512-yUCDCcRNNbI9UUsUB6FYEmDHpo5Tn/f0q5D7vhDP4i6Or8kBj82y7+e31hwfLvK2ykOYlDVs2MxAluH/+QUBOQ==}\n- engines: {node: ^14.15.0 || >=16.0.0}\n+ /@lerna/[email protected]:\n+ resolution: {integrity: sha512-QyKIWEnKQFnYu2ey+SAAm1A5xjzJLJJj3bhIZd3QKyXKKjaJ0hlxam/OsWSltxTNbcyH1jRJjC6Cxv31usv0Ag==}\n+ engines: {node: ^14.17.0 || >=16.0.0}\n dependencies:\n- chalk: 4.1.2\n+ chalk: 4.1.0\n execa: 5.0.0\n strong-log-transformer: 2.1.0\n dev: true\n \n- /@lerna/[email protected]:\n- resolution: {integrity: sha512-GDmHFhQ0mr0RcXWXrsLyfMV6ch/dZV/Ped1e6sFVQhsLL9P+FFXX1ZWxa/dQQ90VWF2qWcmK0+S/L3kUz2xvTA==}\n- engines: {node: ^14.15.0 || >=16.0.0}\n+ /@lerna/[email protected]:\n+ resolution: {integrity: sha512-xQ+1Y7D+9etvUlE+unhG/TwmM6XBzGIdFBaNoW8D8kyOa9M2Jf3vdEtAxVa7mhRz66CENfhL/+I/QkVaa7pwbQ==}\n+ engines: {node: ^14.17.0 || >=16.0.0}\n dependencies:\n- '@lerna/child-process': 6.6.1\n+ '@lerna/child-process': 6.6.2\n dedent: 0.7.0\n fs-extra: 9.1.0\n init-package-json: 3.0.2\n npm-package-arg: 8.1.1\n p-reduce: 2.1.0\n- pacote: 13.6.2\n+ pacote: 15.1.1\n pify: 5.0.0\n semver: 7.5.0\n slash: 3.0.0\n@@ -7751,13 +7766,13 @@ packages:\n - supports-color\n dev: true\n \n- /@lerna/[email protected]([email protected]):\n- resolution: {integrity: sha512-0EYxSFr34VgeudA5rvjGJSY7s4seITMVB7AJ9LRFv9QDUk6jpvapV13ZAaKnhDTxX5vNCfnJuWHXXWq0KyPF/Q==}\n- engines: {node: ^14.15.0 || >=16.0.0}\n+ /@lerna/[email protected]([email protected]):\n+ resolution: {integrity: sha512-0hZxUPKnHwehUO2xC4ldtdX9bW0W1UosxebDIQlZL2STnZnA2IFmIk2lJVUyFW+cmTPQzV93jfS0i69T9Z+teg==}\n+ engines: {node: ^14.17.0 || >=16.0.0}\n dependencies:\n '@npmcli/arborist': 6.2.3\n '@npmcli/run-script': 4.1.7\n- '@nrwl/devkit': 15.9.2([email protected])\n+ '@nrwl/devkit': 15.9.4([email protected])\n '@octokit/rest': 19.0.3\n byte-size: 7.0.0\n chalk: 4.1.0\n@@ -7784,7 +7799,7 @@ packages:\n inquirer: 8.2.4\n is-ci: 2.0.0\n is-stream: 2.0.0\n- libnpmpublish: 6.0.4\n+ libnpmpublish: 7.1.4\n load-json-file: 6.2.0\n make-dir: 3.1.0\n minimatch: 3.0.5\n@@ -7798,7 +7813,7 @@ packages:\n p-map-series: 2.1.0\n p-queue: 6.6.2\n p-waterfall: 2.1.1\n- pacote: 13.6.2\n+ pacote: 15.1.1\n pify: 5.0.0\n pretty-format: 29.4.3\n read-cmd-shim: 3.0.0\n@@ -7836,7 
+7851,7 @@ packages:\n npmlog: 5.0.1\n rimraf: 3.0.2\n semver: 7.5.0\n- tar: 6.1.13\n+ tar: 6.1.14\n transitivePeerDependencies:\n - encoding\n - supports-color\n@@ -7878,15 +7893,15 @@ packages:\n '@isaacs/string-locale-compare': 1.1.0\n '@npmcli/fs': 3.1.0\n '@npmcli/installed-package-contents': 2.0.2\n- '@npmcli/map-workspaces': 3.0.3\n+ '@npmcli/map-workspaces': 3.0.4\n '@npmcli/metavuln-calculator': 5.0.1\n '@npmcli/name-from-folder': 2.0.0\n '@npmcli/node-gyp': 3.0.0\n '@npmcli/package-json': 3.0.0\n '@npmcli/query': 3.0.0\n- '@npmcli/run-script': 6.0.0\n+ '@npmcli/run-script': 6.0.1\n bin-links: 4.0.1\n- cacache: 17.0.5\n+ cacache: 17.1.0\n common-ancestor-path: 1.0.1\n hosted-git-info: 6.1.1\n json-parse-even-better-errors: 3.0.0\n@@ -7896,16 +7911,16 @@ packages:\n npm-install-checks: 6.1.1\n npm-package-arg: 10.1.0\n npm-pick-manifest: 8.0.1\n- npm-registry-fetch: 14.0.4\n+ npm-registry-fetch: 14.0.5\n npmlog: 7.0.1\n- pacote: 15.1.2\n+ pacote: 15.1.1\n parse-conflict-json: 3.0.1\n proc-log: 3.0.0\n promise-all-reject-late: 1.0.1\n promise-call-limit: 1.0.2\n read-package-json-fast: 3.0.2\n semver: 7.5.0\n- ssri: 10.0.3\n+ ssri: 10.0.4\n treeverse: 3.0.0\n walk-up-path: 1.0.0\n transitivePeerDependencies:\n@@ -7928,23 +7943,6 @@ packages:\n semver: 7.5.0\n dev: true\n \n- /@npmcli/[email protected]:\n- resolution: {integrity: sha512-CAcd08y3DWBJqJDpfuVL0uijlq5oaXaOJEKHKc4wqrjd00gkvTZB+nFuLn+doOOKddaQS9JfqtNoFCO2LCvA3w==}\n- engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}\n- dependencies:\n- '@npmcli/promise-spawn': 3.0.0\n- lru-cache: 7.18.3\n- mkdirp: 1.0.4\n- npm-pick-manifest: 7.0.2\n- proc-log: 2.0.1\n- promise-inflight: 1.0.1\n- promise-retry: 2.0.1\n- semver: 7.5.0\n- which: 2.0.2\n- transitivePeerDependencies:\n- - bluebird\n- dev: true\n-\n /@npmcli/[email protected]:\n resolution: {integrity: sha512-5yZghx+u5M47LghaybLCkdSyFzV/w4OuH12d96HO389Ik9CDsLaDZJVynSGGVJOLn6gy/k7Dz5XYcplM3uxXRg==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n@@ -7956,36 +7954,27 @@ packages:\n promise-inflight: 1.0.1\n promise-retry: 2.0.1\n semver: 7.5.0\n- which: 3.0.0\n+ which: 3.0.1\n transitivePeerDependencies:\n - bluebird\n dev: true\n \n- /@npmcli/[email protected]:\n- resolution: {integrity: sha512-9rufe0wnJusCQoLpV9ZPKIVP55itrM5BxOXs10DmdbRfgWtHy1LDyskbwRnBghuB0PrF7pNPOqREVtpz4HqzKw==}\n- engines: {node: '>= 10'}\n- hasBin: true\n- dependencies:\n- npm-bundled: 1.1.2\n- npm-normalize-package-bin: 1.0.1\n- dev: true\n-\n /@npmcli/[email protected]:\n resolution: {integrity: sha512-xACzLPhnfD51GKvTOOuNX2/V4G4mz9/1I2MfDoye9kBM3RYe5g2YbscsaGoTlaWqkxeiapBWyseULVKpSVHtKQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n hasBin: true\n dependencies:\n npm-bundled: 3.0.0\n- npm-normalize-package-bin: 3.0.0\n+ npm-normalize-package-bin: 3.0.1\n dev: true\n \n- /@npmcli/[email protected]:\n- resolution: {integrity: sha512-HlCvFuTzw4UNoKyZdqiNrln+qMF71QJkxy2dsusV8QQdoa89e2TF4dATCzBxbl4zzRzdDoWWyP5ADVrNAH9cRQ==}\n+ /@npmcli/[email protected]:\n+ resolution: {integrity: sha512-Z0TbvXkRbacjFFLpVpV0e2mheCh+WzQpcqL+4xp49uNJOxOnIAPZyXtUxZ5Qn3QBTGKA11Exjd9a5411rBrhDg==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n '@npmcli/name-from-folder': 2.0.0\n- glob: 9.3.5\n- minimatch: 7.4.6\n+ glob: 10.2.2\n+ minimatch: 9.0.0\n read-package-json-fast: 3.0.2\n dev: true\n \n@@ -7993,9 +7982,9 @@ packages:\n resolution: {integrity: sha512-qb8Q9wIIlEPj3WeA1Lba91R4ZboPL0uspzV0F9uwP+9AYMVB2zOoa7Pbk12g6D2NHAinSbHh6QYmGuRyHZ874Q==}\n engines: {node: ^14.17.0 || ^16.13.0 || 
>=18.0.0}\n dependencies:\n- cacache: 17.0.5\n+ cacache: 17.1.0\n json-parse-even-better-errors: 3.0.0\n- pacote: 15.1.2\n+ pacote: 15.1.1\n semver: 7.5.0\n transitivePeerDependencies:\n - bluebird\n@@ -8044,14 +8033,14 @@ packages:\n resolution: {integrity: sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- which: 3.0.0\n+ which: 3.0.1\n dev: true\n \n /@npmcli/[email protected]:\n resolution: {integrity: sha512-MFNDSJNgsLZIEBVZ0Q9w9K7o07j5N4o4yjtdz2uEpuCZlXGMuPENiRaFYk0vRqAA64qVuUQwC05g27fRtfUgnA==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- postcss-selector-parser: 6.0.11\n+ postcss-selector-parser: 6.0.12\n dev: true\n \n /@npmcli/[email protected]:\n@@ -8068,15 +8057,15 @@ packages:\n - supports-color\n dev: true\n \n- /@npmcli/[email protected]:\n- resolution: {integrity: sha512-ql+AbRur1TeOdl1FY+RAwGW9fcr4ZwiVKabdvm93mujGREVuVLbdkXRJDrkTXSdCjaxYydr1wlA2v67jxWG5BQ==}\n+ /@npmcli/[email protected]:\n+ resolution: {integrity: sha512-Yi04ZSold8jcbBJD/ahKMJSQCQifH8DAbMwkBvoLaTpGFxzHC3B/5ZyoVR69q/4xedz84tvi9DJOJjNe17h+LA==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n '@npmcli/node-gyp': 3.0.0\n '@npmcli/promise-spawn': 6.0.2\n node-gyp: 9.3.1\n read-package-json-fast: 3.0.2\n- which: 3.0.0\n+ which: 3.0.1\n transitivePeerDependencies:\n - bluebird\n - supports-color\n@@ -8092,8 +8081,8 @@ packages:\n - debug\n dev: true\n \n- /@nrwl/[email protected]([email protected]):\n- resolution: {integrity: sha512-2DvTstVZb91m+d4wqUJMBHQ3elxyabdmFE6/3aXmtOGeDxTyXyDzf/1O6JvBBiL8K6XC3ZYchjtxUHgxl/NJ5A==}\n+ /@nrwl/[email protected]([email protected]):\n+ resolution: {integrity: sha512-mUX1kXTuPMdTzFxIzH+MsSNvdppOmstPDOEtiGFZJTuJ625ki0HhNJILO3N2mJ7MeMrLqIlAiNdvelQaObxYsQ==}\n peerDependencies:\n nx: '>= 14.1 <= 16'\n dependencies:\n@@ -8105,10 +8094,10 @@ packages:\n tslib: 2.5.0\n dev: true\n \n- /@nrwl/[email protected]([email protected]):\n- resolution: {integrity: sha512-DbSV0vgPc3n/1mP0rKnVgO+m+mNGWRyUjvYifD1IJEvY8qaTsakfqIheINNElK369D5AHrIajCc2FJFHb1OrMw==}\n+ /@nrwl/[email protected]([email protected]):\n+ resolution: {integrity: sha512-KIzHt5g2+AkH4LgEMksPL0q5FUiERtbeP1VNDw57grhaOAviLaYklKU3GA8Zaj73KxGIeHhwCQU0Ju5aIoDDdg==}\n dependencies:\n- '@nx/devkit': 16.0.3([email protected])\n+ '@nx/devkit': 16.1.1([email protected])\n transitivePeerDependencies:\n - nx\n dev: true\n@@ -8213,43 +8202,43 @@ packages:\n - debug\n dev: true\n \n- /@nrwl/[email protected]:\n- resolution: {integrity: sha512-CAECe97eReR1vVf5iuv+Nw0msDJS/HtRBpkkN5e65lSJzEKdsOgpWtteU0QzexN/Spnah8Q/ByM14ii2P1xpCQ==}\n+ /@nrwl/[email protected]:\n+ resolution: {integrity: sha512-rqN5hKfsiPYU1qoyudhmKI17NvK6drX2odeBNce3Ap30fPLDB0R0diAbtgY3tgXCSVBEamhGgSy/4+hngyzXbQ==}\n hasBin: true\n dependencies:\n- nx: 16.0.3\n+ nx: 16.1.1\n transitivePeerDependencies:\n - '@swc-node/register'\n - '@swc/core'\n - debug\n dev: true\n \n- /@nrwl/[email protected]:\n- resolution: {integrity: sha512-94mcNTttzv9IPKrA/uwGpb1gtIdZKaOJS0Pn/Ec72vEdqL+La6pUaYzEvrPSaBNlf6p+3ZzGYCtw03exar/GdQ==}\n+ /@nrwl/[email protected]:\n+ resolution: {integrity: sha512-gyZX2N8Q4OWGYie6LB1+wwOgNfDY1kppWacez9xtYTqhZlJ7L6VbggN+ui72dgsd1qAu/jd5t6GVD098Ff9FpA==}\n dependencies:\n- '@nx/workspace': 16.0.3\n+ '@nx/workspace': 16.1.1\n transitivePeerDependencies:\n - '@swc-node/register'\n - '@swc/core'\n - debug\n dev: true\n \n- /@nx/[email protected]([email protected]):\n- resolution: {integrity: 
sha512-DO4cq1tBtGB+XiHehvJEjYf7gJ/TgDfWuN9PJqcAGC7wxxlsS/4P6ZEr6kmlPjb1FHH6NLgtcp+R+D0zSpV6Ag==}\n+ /@nx/[email protected]([email protected]):\n+ resolution: {integrity: sha512-Tjsj2tKSQnMBmbXKnVSGzcdWDzy7T1jcvbazJ1pf36AwmGbaUj6+sleXceeOguk4dd3lg1yWibjCk+ICMsXIvg==}\n peerDependencies:\n nx: '>= 15 <= 17'\n dependencies:\n- '@nrwl/devkit': 16.0.3([email protected])\n+ '@nrwl/devkit': 16.1.1([email protected])\n ejs: 3.1.9\n ignore: 5.2.4\n- nx: 16.0.3\n+ nx: 16.1.1\n semver: 7.3.4\n tmp: 0.2.1\n tslib: 2.5.0\n dev: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-QBOlHVTSs6ZN86QoomFDsIhk9rhpCS7w7O6t4wSL4YyB09IqX0unvrsaLLoAx2iEbm+UOMc12klVKWoMQyhHwg==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-TzNTgbulEhWm5OtddxUm88RfRL/QMfa4r3l9fu3GrkaSbIMcApY3hERTQaTEsxPBYyITUbqXhtt4MszOP/BtlA==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [darwin]\n@@ -8257,8 +8246,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-h2ZjBCwt2Nv7+F9tKHl7Y/xFGn9YUJEkS5TzFTgTfvL9AP5O/8cqwiDebslxeZ7Choff5Slmkj+P2WhJhKa7qQ==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-pQtP+r5XRC74JEZ5EfxbbNohxILv+7TNXyA5iSrX1e2EsCFfv2eRET6TjBnQxPjsmcEsS+FibjllR3ovdb4BGA==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [darwin]\n@@ -8266,8 +8255,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-gJVkTxOPDdgDPwaMYKvjfcIrWnt1cMlD3tKe9j4jVWsddO9JFLRAtknhXiLFkdcgZwQPo8P1hvo7QgbavbuVSA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-8V0JzLBNauXRSVoTWfv/V5e+3xKKoxoOzldH71JDXwtSioCNxx26vXhVYdLaVUpBHkfLz0Zx/bSOS8xjhg2mww==}\n engines: {node: '>= 10'}\n cpu: [arm]\n os: [linux]\n@@ -8275,8 +8264,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-p1ZxZxosfso74aDrP+ofmzrqH3om4LpRAfLDN69L/4OMTT10qLsPpMShWpw9j1U+wZc5o05ZdTMCbeg4jx4Tug==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-LbN3rQYzL6n4F8dTAnVqyZONyaYHakiUehRfypPfMsTywgCjGKlkeRi+32NoPB+gTj9HE+dbdmb1b08PAKHN+w==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [linux]\n@@ -8284,8 +8273,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-t2cQw84TyVdDATFK2R3NyG/LiweBiBLcneHuI8AFr0PSJSqof8BIHqX9NNB2L5z9j2XkEeMgsqnlXfQtOzNXJA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-rMKC+yGkH7FuWT8O4f8aWHpJireKBVfWhgziHxL3GisdtgvHSA5O1NCaKAW/jflLOuyj02aPrRphnEovhoPTgw==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [linux]\n@@ -8293,8 +8282,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-sKYP9RXAK+wZRXUIlOhqwvSOW6FJblJZPKKHXCWWq9dvAr3CXaWKr+Tt+7846fAX3Q+3hyckuAi6eEiT3XHMTA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-q8AG22WDdy2pVuQoD7ZjgTmok8GHhInZgcRTnbW3+RtF/0vF9nZ8lHzVedCHqSErisb+dz1VVXLe250s2j+pRA==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [linux]\n@@ -8302,8 +8291,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-AcDWUAs+Tgy4M0bguehUmZTPKE97pYP9GCM8QPzeNJLWyjCfkwHSUImOa0C8+Vx33uRO7I6uPa6QGEoEv/TiFg==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-Lse5oMwcE44UvxvxncCKkCrbWBiBPYIiAM7GC62nt1h8td8k14z7JxZV6dB/Yri2fFHCKDoY2tEkW9N6E4VzqQ==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [linux]\n@@ -8311,8 +8300,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: 
sha512-2aXeXYLOhyS0hrSqM5T0T4GK3EoQZZY7oyO4+ruk5f5JZq4LxmdJsrnkgBBR3FmPt7P/GT8vykgJuO9pUAWohQ==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-KV+sn/w9rEtKgs2DGvkWeGLmgB3LgsaBcekPLV6oEjQo58dDsyGxZlOwfK3hF4R50l8J039cVRfPtLhWxuRUuA==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [win32]\n@@ -8320,8 +8309,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-/Ezru8nlrckLAQ4s7wEAW192DULgWj9YBqREQPV0ddzwjeVC0clsHSXguH8WzMwEU44+IFnqNMJrWK3mZCvZYA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-YSzkfnCDmflg16sUeyC1IiRAxQ5nAW5KnSfvr901kW2LqAsZ7esnSjaHrP7SOGk7JO3ncmLm7BWvtZ6N9Dk4bA==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [win32]\n@@ -8329,11 +8318,11 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-jy5a7Gu0p5JgVfVyrnvE5Hqy0A+57U1pl5R3U4XvNNa1848ibVnT3qMkb7s6S50nN6WWHwuCAsIMEV/w88OmRA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-LN3F/NKVBYys1HGKLeu57aCir5YEsmlIXHsU8gCdouK/1Xdyz9s4hfyo/ioQRzZpSjwgjmnJD+YzhdkYk+4V1A==}\n dependencies:\n- '@nrwl/workspace': 16.0.3\n- '@nx/devkit': 16.0.3([email protected])\n+ '@nrwl/workspace': 16.1.1\n+ '@nx/devkit': 16.1.1([email protected])\n '@parcel/watcher': 2.0.4\n chalk: 4.1.2\n chokidar: 3.5.3\n@@ -8345,12 +8334,12 @@ packages:\n ignore: 5.2.4\n minimatch: 3.0.5\n npm-run-path: 4.0.1\n- nx: 16.0.3\n+ nx: 16.1.1\n open: 8.4.2\n- rxjs: 6.6.7\n+ rxjs: 7.8.1\n tmp: 0.2.1\n tslib: 2.5.0\n- yargs: 17.7.1\n+ yargs: 17.7.2\n yargs-parser: 21.1.1\n transitivePeerDependencies:\n - '@swc-node/register'\n@@ -8362,7 +8351,7 @@ packages:\n resolution: {integrity: sha512-/aFM2M4HVDBT/jjDBa84sJniv1t9Gm/rLkalaz9htOm+L+8JMj1k9w0CkUdcxNyNxZPlTxKPVko+m1VlM58ZVA==}\n engines: {node: '>= 14'}\n dependencies:\n- '@octokit/types': 9.1.2\n+ '@octokit/types': 9.2.1\n dev: true\n \n /@octokit/[email protected]:\n@@ -8373,7 +8362,7 @@ packages:\n '@octokit/graphql': 5.0.5\n '@octokit/request': 6.2.3\n '@octokit/request-error': 3.0.3\n- '@octokit/types': 9.1.2\n+ '@octokit/types': 9.2.1\n before-after-hook: 2.2.3\n universal-user-agent: 6.0.0\n transitivePeerDependencies:\n@@ -8384,7 +8373,7 @@ packages:\n resolution: {integrity: sha512-LG4o4HMY1Xoaec87IqQ41TQ+glvIeTKqfjkCEmt5AIwDZJwQeVZFIEYXrYY6yLwK+pAScb9Gj4q+Nz2qSw1roA==}\n engines: {node: '>= 14'}\n dependencies:\n- '@octokit/types': 9.1.2\n+ '@octokit/types': 9.2.1\n is-plain-object: 5.0.0\n universal-user-agent: 6.0.0\n dev: true\n@@ -8394,7 +8383,7 @@ packages:\n engines: {node: '>= 14'}\n dependencies:\n '@octokit/request': 6.2.3\n- '@octokit/types': 9.1.2\n+ '@octokit/types': 9.2.1\n universal-user-agent: 6.0.0\n transitivePeerDependencies:\n - encoding\n@@ -8408,8 +8397,8 @@ packages:\n resolution: {integrity: sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==}\n dev: true\n \n- /@octokit/[email protected]:\n- resolution: {integrity: sha512-V8BVJGN0ZmMlURF55VFHFd/L92XQQ43KvFjNmY1IYbCN3V/h/uUFV6iQi19WEHM395Nn+1qhUbViCAD/1czzog==}\n+ /@octokit/[email protected]:\n+ resolution: {integrity: sha512-/X7Gh/qWiWaooJmUnYD48SYy72fyrk2ceisOSe89JojK7r0j8YrTwYpDi76kI+c6QiqX1KSgdoBTMJvktsDkYw==}\n dev: true\n \n /@octokit/[email protected]:\n@@ -8449,7 +8438,7 @@ packages:\n resolution: {integrity: sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==}\n engines: {node: '>= 14'}\n dependencies:\n- '@octokit/types': 9.1.2\n+ '@octokit/types': 9.2.1\n deprecation: 2.3.1\n once: 1.4.0\n 
dev: true\n@@ -8460,7 +8449,7 @@ packages:\n dependencies:\n '@octokit/endpoint': 7.0.5\n '@octokit/request-error': 3.0.3\n- '@octokit/types': 9.1.2\n+ '@octokit/types': 9.2.1\n is-plain-object: 5.0.0\n node-fetch: 2.6.7\n universal-user-agent: 6.0.0\n@@ -8492,10 +8481,10 @@ packages:\n '@octokit/openapi-types': 14.0.0\n dev: true\n \n- /@octokit/[email protected]:\n- resolution: {integrity: sha512-LPbJIuu1WNoRHbN4UMysEdlissRFpTCWyoKT7kHPufI8T+XX33/qilfMWJo3mCOjNIKu0+43oSQPf+HJa0+TTQ==}\n+ /@octokit/[email protected]:\n+ resolution: {integrity: sha512-Vx4keMiD/CAiwVFasLcH0xBSVbKIHebIZke9i7ZbUWGNN4vJFWSYH6Nvga7UY9NIJCGa6x3QG849XTbi5wYmkA==}\n dependencies:\n- '@octokit/openapi-types': 17.0.0\n+ '@octokit/openapi-types': 17.1.1\n dev: true\n \n /@parcel/[email protected]:\n@@ -8581,12 +8570,12 @@ packages:\n '@tsparticles/prettier-config': 1.9.0\n '@tsparticles/tsconfig': 1.12.0\n '@tsparticles/webpack-plugin': 1.13.0\n- '@typescript-eslint/eslint-plugin': 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n- '@typescript-eslint/parser': 5.59.0([email protected])([email protected])\n+ '@typescript-eslint/eslint-plugin': 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n+ '@typescript-eslint/parser': 5.59.2([email protected])([email protected])\n commander: 10.0.1\n- eslint: 8.39.0\n- eslint-config-prettier: 8.8.0([email protected])\n- eslint-plugin-jsdoc: 43.0.7([email protected])\n+ eslint: 8.40.0\n+ eslint-config-prettier: 8.8.0([email protected])\n+ eslint-plugin-jsdoc: 43.2.0([email protected])\n eslint-plugin-tsdoc: 0.2.17\n fs-extra: 11.1.1\n klaw: 4.1.0\n@@ -8594,7 +8583,7 @@ packages:\n prompts: 2.4.2\n rimraf: 5.0.0\n typescript: 5.0.4\n- webpack: 5.80.0([email protected])\n+ webpack: 5.82.0([email protected])\n transitivePeerDependencies:\n - '@swc/core'\n - '@webpack-cli/generators'\n@@ -8611,11 +8600,11 @@ packages:\n resolution: {integrity: sha512-u5kVl2i1vxAS2tOpR0idZq3qkC6lr/Gj6KASpH24BPedwiiCfWzUgOQbIdnpZVYdaVAhgMX+0YEqO9qE8S46/g==}\n dependencies:\n '@tsparticles/prettier-config': 1.9.0\n- '@typescript-eslint/eslint-plugin': 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n- '@typescript-eslint/parser': 5.59.0([email protected])([email protected])\n- eslint: 8.39.0\n- eslint-config-prettier: 8.8.0([email protected])\n- eslint-plugin-jsdoc: 43.0.7([email protected])\n+ '@typescript-eslint/eslint-plugin': 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n+ '@typescript-eslint/parser': 5.59.2([email protected])([email protected])\n+ eslint: 8.40.0\n+ eslint-config-prettier: 8.8.0([email protected])\n+ eslint-plugin-jsdoc: 43.2.0([email protected])\n eslint-plugin-tsdoc: 0.2.17\n prettier: 2.8.8\n typescript: 5.0.4\n@@ -8635,27 +8624,27 @@ packages:\n /@tsparticles/[email protected]:\n resolution: {integrity: sha512-yOiOMSyvGJvShSfk2PFD4myYXc7R2givPYi7UpmRV37/o7xVVJP9SyNsuMeQQMiUd9Xr19yVRVi72b2XsKJT8A==}\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/preset-env': 7.21.4(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/preset-env': 7.21.5(@babel/[email protected])\n '@tsparticles/eslint-config': 1.11.0\n '@tsparticles/prettier-config': 1.9.0\n- '@types/node': 18.16.3\n+ '@types/node': 18.16.5\n '@types/webpack-bundle-analyzer': 4.6.0([email protected])\n '@types/webpack-env': 1.18.0\n- '@typescript-eslint/eslint-plugin': 5.59.0(@typescript-eslint/[email protected])([email protected])([email protected])\n- 
'@typescript-eslint/parser': 5.59.0([email protected])([email protected])\n- babel-loader: 9.1.2(@babel/[email protected])([email protected])\n+ '@typescript-eslint/eslint-plugin': 5.59.2(@typescript-eslint/[email protected])([email protected])([email protected])\n+ '@typescript-eslint/parser': 5.59.2([email protected])([email protected])\n+ babel-loader: 9.1.2(@babel/[email protected])([email protected])\n browserslist: 4.21.5\n copyfiles: 2.4.1\n- eslint: 8.39.0\n- eslint-config-prettier: 8.8.0([email protected])\n+ eslint: 8.40.0\n+ eslint-config-prettier: 8.8.0([email protected])\n prettier: 2.8.8\n rimraf: 5.0.0\n- terser-webpack-plugin: 5.3.7([email protected])\n+ terser-webpack-plugin: 5.3.7([email protected])\n typescript: 5.0.4\n- webpack: 5.80.0([email protected])\n+ webpack: 5.82.0([email protected])\n webpack-bundle-analyzer: 4.8.0\n- webpack-cli: 5.0.2([email protected])([email protected])\n+ webpack-cli: 5.0.2([email protected])([email protected])\n transitivePeerDependencies:\n - '@swc/core'\n - '@webpack-cli/generators'\n@@ -8671,12 +8660,12 @@ packages:\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dev: true\n \n- /@tufjs/[email protected]:\n- resolution: {integrity: sha512-mkFEqqRisi13DmR5pX4x+Zk97EiU8djTtpNW1GeuX410y/raAsq/T3ZCjwoRIZ8/cIBfW0olK/sywlAiWevDVw==}\n+ /@tufjs/[email protected]:\n+ resolution: {integrity: sha512-qaGV9ltJP0EO25YfFUPhxRVK0evXFIAGicsVXuRim4Ed9cjPxYhNnNJ49SFmbeLgtxpslIkX317IgpfcHPVj/A==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n '@tufjs/canonical-json': 1.0.0\n- minimatch: 7.4.6\n+ minimatch: 9.0.0\n dev: true\n \n /@types/[email protected]:\n@@ -8694,12 +8683,12 @@ packages:\n dependencies:\n '@types/http-cache-semantics': 4.0.1\n '@types/keyv': 3.1.4\n- '@types/node': 18.16.3\n+ '@types/node': 18.16.5\n '@types/responselike': 1.0.0\n dev: true\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-KnRanxnpfpjUTqTCXslZSEdLfXExwgNxYPdiO2WGUj8+HDjFi8R3k5RVKPeSCzLjCcshCAtVO2QBbVuAV4kTnw==}\n+ /@types/[email protected]:\n+ resolution: {integrity: sha512-mEo1sAde+UCE6b2hxn332f1g1E8WfYRu6p5SvTKr2ZKC1f7gFJXk4h5PyGP9Dt6gCaG8y8XhwnXWC6Iy2cmBng==}\n dev: true\n \n /@types/[email protected]:\n@@ -8721,14 +8710,7 @@ packages:\n resolution: {integrity: sha512-MxObHvNl4A69ofaTRU8DFqvgzzv8s9yRtaPPm5gud9HDNvpB3GPQFvNuTWAI59B9huVGV5jXYJwbCsmBsOGYWA==}\n dependencies:\n '@types/jsonfile': 6.1.1\n- '@types/node': 18.16.0\n- dev: true\n-\n- /@types/[email protected]:\n- resolution: {integrity: sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==}\n- dependencies:\n- '@types/minimatch': 5.1.2\n- '@types/node': 18.16.3\n+ '@types/node': 20.1.0\n dev: true\n \n /@types/[email protected]:\n@@ -8738,7 +8720,7 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-cZFuoVLtzKP3gmq9eNosUL1R50U+USkbLtUQ1bYVgl/lKp0FZM7Cq4aIHAL8oIvQ17uSHi7jXPtfDOdjPwBE7A==}\n dependencies:\n- '@types/node': 18.16.0\n+ '@types/node': 20.1.0\n '@types/tough-cookie': 4.0.2\n parse5: 7.1.2\n dev: true\n@@ -8749,19 +8731,19 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-GSgiRCVeapDN+3pqA35IkQwasaCh/0YFH5dEF6S88iDvEn901DjOeH3/QPY+XYP1DFzDZPvIvfeEgk+7br5png==}\n dependencies:\n- '@types/node': 18.16.3\n+ '@types/node': 20.1.0\n dev: true\n \n /@types/[email protected]:\n resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==}\n dependencies:\n- '@types/node': 18.16.3\n+ '@types/node': 18.16.5\n dev: 
true\n \n /@types/[email protected]:\n resolution: {integrity: sha512-mXlRDFbTLpVysvxahXUQav0hFctgu3Fqr2xmSrpf/ptO/FwOp7SFEGsJkEihwshMbof3/BIiVJ/o42cuOOuv6g==}\n dependencies:\n- '@types/node': 18.16.0\n+ '@types/node': 20.1.0\n dev: true\n \n /@types/[email protected]:\n@@ -8772,10 +8754,6 @@ packages:\n resolution: {integrity: sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==}\n dev: true\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==}\n- dev: true\n-\n /@types/[email protected]:\n resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==}\n dev: true\n@@ -8784,16 +8762,15 @@ packages:\n resolution: {integrity: sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==}\n dev: true\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-zvSN2Esek1aeLdKDYuntKAYjti9Z2oT4I8bfkLLhIxHlv3dwZ5vvATxOc31820iYm4hQRCwjUgDpwSMFjfTUnw==}\n+ /@types/[email protected]:\n+ resolution: {integrity: sha512-pCNBzNQqCXE4A6FWDmrn/o1Qu+qBf8tnorBlNoPNSBQJF+jXzvTKNI/aMiE+hGJbK5sDAD65g7OS/YwSHIEJdw==}\n dev: true\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-BsAaKhB+7X+H4GnSjGhJG9Qi8Tw+inU9nJDwmD5CgOmBLEI6ArdhikpLX7DjbjDRDTbqZzU2LSQNZg8WGPiSZQ==}\n- dev: true\n+ /@types/[email protected]:\n+ resolution: {integrity: sha512-seOA34WMo9KB+UA78qaJoCO20RJzZGVXQ5Sh6FWu0g/hfT44nKXnej3/tCQl7FL97idFpBhisLYCTB50S0EirA==}\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-OPs5WnnT1xkCBiuQrZA4+YAV4HEJejmHneyraIaxsbev5yCEr6KMwINNFP9wQeFIw8FWcoTqF3vQsa5CDaI+8Q==}\n+ /@types/[email protected]:\n+ resolution: {integrity: sha512-O+z53uwx64xY7D6roOi4+jApDGFg0qn6WHcxe5QeqjMaTezBO/mxdfFXIVAVVyNWKx84OmPB3L8kbVYOTeN34A==}\n \n /@types/[email protected]:\n resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==}\n@@ -8810,14 +8787,14 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==}\n dependencies:\n- '@types/node': 18.16.3\n+ '@types/node': 18.16.5\n dev: true\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==}\n+ /@types/[email protected]:\n+ resolution: {integrity: sha512-DTCZoIQotB2SUJnYgrEx43cQIUYOlNZz0AZPbKU4PSLYTUdML5Gox0++z4F9kQocxStrCmRNhi4x5x/UlwtKUA==}\n+ deprecated: This is a stub types definition. 
rimraf provides its own type definitions, so you do not need this installed.\n dependencies:\n- '@types/glob': 8.1.0\n- '@types/node': 18.16.0\n+ rimraf: 5.0.0\n dev: true\n \n /@types/[email protected]:\n@@ -8830,9 +8807,9 @@ packages:\n /@types/[email protected]([email protected]):\n resolution: {integrity: sha512-XeQmQCCXdZdap+A/60UKmxW5Mz31Vp9uieGlHB3T4z/o2OLVLtTI3bvTuS6A2OWd/rbAAQiGGWIEFQACu16szA==}\n dependencies:\n- '@types/node': 18.16.3\n+ '@types/node': 20.1.0\n tapable: 2.2.1\n- webpack: 5.80.0([email protected])\n+ webpack: 5.82.0([email protected])\n transitivePeerDependencies:\n - '@swc/core'\n - esbuild\n@@ -8846,12 +8823,12 @@ packages:\n resolution: {integrity: sha512-Cn6WYCm0tXv8p6k+A8PvbDG763EDpBoTzHdA+Q/MF6H3sapGjCm9NzoaJncJS9tUKSuCoDs9XHxYYsQDgxR6kw==}\n requiresBuild: true\n dependencies:\n- '@types/node': 18.16.3\n+ '@types/node': 18.16.5\n dev: true\n optional: true\n \n- /@typescript-eslint/[email protected](@typescript-eslint/[email protected])([email protected])([email protected]):\n- resolution: {integrity: sha512-p0QgrEyrxAWBecR56gyn3wkG15TJdI//eetInP3zYRewDh0XS+DhB3VUAd3QqvziFsfaQIoIuZMxZRB7vXYaYw==}\n+ /@typescript-eslint/[email protected](@typescript-eslint/[email protected])([email protected])([email protected]):\n+ resolution: {integrity: sha512-yVrXupeHjRxLDcPKL10sGQ/QlVrA8J5IYOEWVqk0lJaSZP7X5DfnP7Ns3cc74/blmbipQ1htFNVGsHX6wsYm0A==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n peerDependencies:\n '@typescript-eslint/parser': ^5.0.0\n@@ -8861,13 +8838,13 @@ packages:\n typescript:\n optional: true\n dependencies:\n- '@eslint-community/regexpp': 4.5.0\n- '@typescript-eslint/parser': 5.59.0([email protected])([email protected])\n- '@typescript-eslint/scope-manager': 5.59.0\n- '@typescript-eslint/type-utils': 5.59.0([email protected])([email protected])\n- '@typescript-eslint/utils': 5.59.0([email protected])([email protected])\n+ '@eslint-community/regexpp': 4.5.1\n+ '@typescript-eslint/parser': 5.59.2([email protected])([email protected])\n+ '@typescript-eslint/scope-manager': 5.59.2\n+ '@typescript-eslint/type-utils': 5.59.2([email protected])([email protected])\n+ '@typescript-eslint/utils': 5.59.2([email protected])([email protected])\n debug: 4.3.4([email protected])\n- eslint: 8.39.0\n+ eslint: 8.40.0\n grapheme-splitter: 1.0.4\n ignore: 5.2.4\n natural-compare-lite: 1.4.0\n@@ -8877,8 +8854,8 @@ packages:\n transitivePeerDependencies:\n - supports-color\n \n- /@typescript-eslint/[email protected]([email protected])([email protected]):\n- resolution: {integrity: sha512-qK9TZ70eJtjojSUMrrEwA9ZDQ4N0e/AuoOIgXuNBorXYcBDk397D2r5MIe1B3cok/oCtdNC5j+lUUpVB+Dpb+w==}\n+ /@typescript-eslint/[email protected]([email protected])([email protected]):\n+ resolution: {integrity: sha512-uq0sKyw6ao1iFOZZGk9F8Nro/8+gfB5ezl1cA06SrqbgJAt0SRoFhb9pXaHvkrxUpZaoLxt8KlovHNk8Gp6/HQ==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n peerDependencies:\n eslint: ^6.0.0 || ^7.0.0 || ^8.0.0\n@@ -8887,24 +8864,24 @@ packages:\n typescript:\n optional: true\n dependencies:\n- '@typescript-eslint/scope-manager': 5.59.0\n- '@typescript-eslint/types': 5.59.0\n- '@typescript-eslint/typescript-estree': 5.59.0([email protected])\n+ '@typescript-eslint/scope-manager': 5.59.2\n+ '@typescript-eslint/types': 5.59.2\n+ '@typescript-eslint/typescript-estree': 5.59.2([email protected])\n debug: 4.3.4([email protected])\n- eslint: 8.39.0\n+ eslint: 8.40.0\n typescript: 5.0.4\n transitivePeerDependencies:\n - supports-color\n \n- /@typescript-eslint/[email protected]:\n- resolution: 
{integrity: sha512-tsoldKaMh7izN6BvkK6zRMINj4Z2d6gGhO2UsI8zGZY3XhLq1DndP3Ycjhi1JwdwPRwtLMW4EFPgpuKhbCGOvQ==}\n+ /@typescript-eslint/[email protected]:\n+ resolution: {integrity: sha512-dB1v7ROySwQWKqQ8rEWcdbTsFjh2G0vn8KUyvTXdPoyzSL6lLGkiXEV5CvpJsEe9xIdKV+8Zqb7wif2issoOFA==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n dependencies:\n- '@typescript-eslint/types': 5.59.0\n- '@typescript-eslint/visitor-keys': 5.59.0\n+ '@typescript-eslint/types': 5.59.2\n+ '@typescript-eslint/visitor-keys': 5.59.2\n \n- /@typescript-eslint/[email protected]([email protected])([email protected]):\n- resolution: {integrity: sha512-d/B6VSWnZwu70kcKQSCqjcXpVH+7ABKH8P1KNn4K7j5PXXuycZTPXF44Nui0TEm6rbWGi8kc78xRgOC4n7xFgA==}\n+ /@typescript-eslint/[email protected]([email protected])([email protected]):\n+ resolution: {integrity: sha512-b1LS2phBOsEy/T381bxkkywfQXkV1dWda/z0PhnIy3bC5+rQWQDS7fk9CSpcXBccPY27Z6vBEuaPBCKCgYezyQ==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n peerDependencies:\n eslint: '*'\n@@ -8913,21 +8890,21 @@ packages:\n typescript:\n optional: true\n dependencies:\n- '@typescript-eslint/typescript-estree': 5.59.0([email protected])\n- '@typescript-eslint/utils': 5.59.0([email protected])([email protected])\n+ '@typescript-eslint/typescript-estree': 5.59.2([email protected])\n+ '@typescript-eslint/utils': 5.59.2([email protected])([email protected])\n debug: 4.3.4([email protected])\n- eslint: 8.39.0\n+ eslint: 8.40.0\n tsutils: 3.21.0([email protected])\n typescript: 5.0.4\n transitivePeerDependencies:\n - supports-color\n \n- /@typescript-eslint/[email protected]:\n- resolution: {integrity: sha512-yR2h1NotF23xFFYKHZs17QJnB51J/s+ud4PYU4MqdZbzeNxpgUr05+dNeCN/bb6raslHvGdd6BFCkVhpPk/ZeA==}\n+ /@typescript-eslint/[email protected]:\n+ resolution: {integrity: sha512-LbJ/HqoVs2XTGq5shkiKaNTuVv5tTejdHgfdjqRUGdYhjW1crm/M7og2jhVskMt8/4wS3T1+PfFvL1K3wqYj4w==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n \n- /@typescript-eslint/[email protected]([email protected]):\n- resolution: {integrity: sha512-sUNnktjmI8DyGzPdZ8dRwW741zopGxltGs/SAPgGL/AAgDpiLsCFLcMNSpbfXfmnNeHmK9h3wGmCkGRGAoUZAg==}\n+ /@typescript-eslint/[email protected]([email protected]):\n+ resolution: {integrity: sha512-+j4SmbwVmZsQ9jEyBMgpuBD0rKwi9RxRpjX71Brr73RsYnEr3Lt5QZ624Bxphp8HUkSKfqGnPJp1kA5nl0Sh7Q==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n peerDependencies:\n typescript: '*'\n@@ -8935,8 +8912,8 @@ packages:\n typescript:\n optional: true\n dependencies:\n- '@typescript-eslint/types': 5.59.0\n- '@typescript-eslint/visitor-keys': 5.59.0\n+ '@typescript-eslint/types': 5.59.2\n+ '@typescript-eslint/visitor-keys': 5.59.2\n debug: 4.3.4([email protected])\n globby: 11.1.0\n is-glob: 4.0.3\n@@ -8946,31 +8923,31 @@ packages:\n transitivePeerDependencies:\n - supports-color\n \n- /@typescript-eslint/[email protected]([email protected])([email protected]):\n- resolution: {integrity: sha512-GGLFd+86drlHSvPgN/el6dRQNYYGOvRSDVydsUaQluwIW3HvbXuxyuD5JETvBt/9qGYe+lOrDk6gRrWOHb/FvA==}\n+ /@typescript-eslint/[email protected]([email protected])([email protected]):\n+ resolution: {integrity: sha512-kSuF6/77TZzyGPhGO4uVp+f0SBoYxCDf+lW3GKhtKru/L8k/Hd7NFQxyWUeY7Z/KGB2C6Fe3yf2vVi4V9TsCSQ==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n peerDependencies:\n eslint: ^6.0.0 || ^7.0.0 || ^8.0.0\n dependencies:\n- '@eslint-community/eslint-utils': 4.4.0([email protected])\n+ '@eslint-community/eslint-utils': 4.4.0([email protected])\n '@types/json-schema': 7.0.11\n '@types/semver': 7.3.13\n- 
'@typescript-eslint/scope-manager': 5.59.0\n- '@typescript-eslint/types': 5.59.0\n- '@typescript-eslint/typescript-estree': 5.59.0([email protected])\n- eslint: 8.39.0\n+ '@typescript-eslint/scope-manager': 5.59.2\n+ '@typescript-eslint/types': 5.59.2\n+ '@typescript-eslint/typescript-estree': 5.59.2([email protected])\n+ eslint: 8.40.0\n eslint-scope: 5.1.1\n semver: 7.5.0\n transitivePeerDependencies:\n - supports-color\n - typescript\n \n- /@typescript-eslint/[email protected]:\n- resolution: {integrity: sha512-qZ3iXxQhanchCeaExlKPV3gDQFxMUmU35xfd5eCXB6+kUw1TUAbIy2n7QIrwz9s98DQLzNWyHp61fY0da4ZcbA==}\n+ /@typescript-eslint/[email protected]:\n+ resolution: {integrity: sha512-EEpsO8m3RASrKAHI9jpavNv9NlEUebV4qmF1OWxSTtKSFBpC1NCmWazDQHFivRf0O1DV11BA645yrLEVQ0/Lig==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n dependencies:\n- '@typescript-eslint/types': 5.59.0\n- eslint-visitor-keys: 3.4.0\n+ '@typescript-eslint/types': 5.59.2\n+ eslint-visitor-keys: 3.4.1\n \n /@webassemblyjs/[email protected]:\n resolution: {integrity: sha512-LHY/GSAZZRpsNQH+/oHqhRQ5FT7eoULcBqgfyTB5nQHogFnK3/7QoN7dLnwSE/JkUAF0SrRuclT7ODqMFtWxxQ==}\n@@ -9063,27 +9040,27 @@ packages:\n '@webassemblyjs/ast': 1.11.5\n '@xtuc/long': 4.2.2\n \n- /@webpack-cli/[email protected]([email protected])([email protected]):\n+ /@webpack-cli/[email protected]([email protected])([email protected]):\n resolution: {integrity: sha512-njsdJXJSiS2iNbQVS0eT8A/KPnmyH4pv1APj2K0d1wrZcBLw+yppxOy4CGqa0OxDJkzfL/XELDhD8rocnIwB5A==}\n engines: {node: '>=14.15.0'}\n peerDependencies:\n webpack: 5.x.x\n webpack-cli: 5.x.x\n dependencies:\n- webpack: 5.80.0([email protected])\n- webpack-cli: 5.0.2([email protected])([email protected])\n+ webpack: 5.82.0([email protected])\n+ webpack-cli: 5.0.2([email protected])([email protected])\n \n- /@webpack-cli/[email protected]([email protected])([email protected]):\n+ /@webpack-cli/[email protected]([email protected])([email protected]):\n resolution: {integrity: sha512-fE1UEWTwsAxRhrJNikE7v4EotYflkEhBL7EbajfkPlf6E37/2QshOy/D48Mw8G5XMFlQtS6YV42vtbG9zBpIQA==}\n engines: {node: '>=14.15.0'}\n peerDependencies:\n webpack: 5.x.x\n webpack-cli: 5.x.x\n dependencies:\n- webpack: 5.80.0([email protected])\n- webpack-cli: 5.0.2([email protected])([email protected])\n+ webpack: 5.82.0([email protected])\n+ webpack-cli: 5.0.2([email protected])([email protected])\n \n- /@webpack-cli/[email protected]([email protected])([email protected]):\n+ /@webpack-cli/[email protected]([email protected])([email protected]):\n resolution: {integrity: sha512-S9h3GmOmzUseyeFW3tYNnWS7gNUuwxZ3mmMq0JyW78Vx1SGKPSkt5bT4pB0rUnVfHjP0EL9gW2bOzmtiTfQt0A==}\n engines: {node: '>=14.15.0'}\n peerDependencies:\n@@ -9094,8 +9071,8 @@ packages:\n webpack-dev-server:\n optional: true\n dependencies:\n- webpack: 5.80.0([email protected])\n- webpack-cli: 5.0.2([email protected])([email protected])\n+ webpack: 5.82.0([email protected])\n+ webpack-cli: 5.0.2([email protected])([email protected])\n \n /@xtuc/[email protected]:\n resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==}\n@@ -9107,8 +9084,8 @@ packages:\n resolution: {integrity: sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==}\n dev: true\n \n- /@yarnpkg/[email protected]:\n- resolution: {integrity: sha512-eW9Mbegmb5bJjwawJM9ghjUjUqciNMhC6L7XrQPF/clXS5bbP66MstsgCT5hy9VlfUh/CfBT+0Wucf531dMjHA==}\n+ /@yarnpkg/[email protected]:\n+ resolution: {integrity: 
sha512-AhFF3mIDfA+jEwQv2WMHmiYhOvmdbh2qhUkDVQfiqzQtUwS4BgoWwom5NpSPg4Ix5vOul+w1690Bt21CkVLpgg==}\n engines: {node: '>=14.15.0'}\n dependencies:\n js-yaml: 3.14.1\n@@ -9156,8 +9133,8 @@ packages:\n negotiator: 0.6.3\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-ETLeQ3X1XvcWckOZFR+KvTectZyEwDm2p+CckWazS+xsK3THHVxn/PkfkPr37OTNKVY/yJRx29JGERV77YQYXw==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-7iRX9MsxyhMUsqfWpWrJVf7dmv0nQcidOQOhzfYLQnNELdVpaqXVWcewfQqEHP+M0RR2TNie0gqoxPSstUc8Ww==}\n dev: true\n \n /[email protected]:\n@@ -9314,6 +9291,10 @@ packages:\n resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}\n engines: {node: '>=8'}\n \n+ /[email protected]:\n+ resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==}\n+ engines: {node: '>=12'}\n+\n /[email protected]:\n resolution: {integrity: sha512-lEm8mt52to2fT8GhciPCGeCXACSz2UwIN4X2e2LJSnZ5uAbn2/dsYdOmUXq0AtWS5cpAupysIneExOgH0Vd2TQ==}\n dev: true\n@@ -9340,6 +9321,10 @@ packages:\n engines: {node: '>=10'}\n dev: true\n \n+ /[email protected]:\n+ resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==}\n+ engines: {node: '>=12'}\n+\n /[email protected]:\n resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}\n engines: {node: '>= 8'}\n@@ -9462,8 +9447,8 @@ packages:\n - debug\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-PEcdkk7JcdPiMDkvM4K6ZBRYq9keuVJsToxm2zQIM70Qqo2WHTdJZMXcG9X+RmRp2VPNUQC8W1RAGbgt6b1yMg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==}\n dependencies:\n follow-redirects: 1.15.2\n form-data: 4.0.0\n@@ -9472,48 +9457,48 @@ packages:\n - debug\n dev: true\n \n- /[email protected](@babel/[email protected])([email protected]):\n+ /[email protected](@babel/[email protected])([email protected]):\n resolution: {integrity: sha512-mN14niXW43tddohGl8HPu5yfQq70iUThvFL/4QzESA7GcZoC0eVOhvWdQ8+3UlSjaDE9MVtsW9mxDY07W7VpVA==}\n engines: {node: '>= 14.15.0'}\n peerDependencies:\n '@babel/core': ^7.12.0\n webpack: '>=5'\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n find-cache-dir: 3.3.2\n schema-utils: 4.0.1\n- webpack: 5.80.0([email protected])\n+ webpack: 5.82.0([email protected])\n \n- /[email protected](@babel/[email protected]):\n+ /[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/compat-data': 7.21.4\n- '@babel/core': 7.21.4\n- '@babel/helper-define-polyfill-provider': 0.3.3(@babel/[email protected])\n+ '@babel/compat-data': 7.21.7\n+ '@babel/core': 7.21.8\n+ '@babel/helper-define-polyfill-provider': 0.3.3(@babel/[email protected])\n semver: 6.3.0\n transitivePeerDependencies:\n - supports-color\n \n- /[email protected](@babel/[email protected]):\n+ /[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-define-polyfill-provider': 0.3.3(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ 
'@babel/helper-define-polyfill-provider': 0.3.3(@babel/[email protected])\n core-js-compat: 3.30.1\n transitivePeerDependencies:\n - supports-color\n \n- /[email protected](@babel/[email protected]):\n+ /[email protected](@babel/[email protected]):\n resolution: {integrity: sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw==}\n peerDependencies:\n '@babel/core': ^7.0.0-0\n dependencies:\n- '@babel/core': 7.21.4\n- '@babel/helper-define-polyfill-provider': 0.3.3(@babel/[email protected])\n+ '@babel/core': 7.21.8\n+ '@babel/helper-define-polyfill-provider': 0.3.3(@babel/[email protected])\n transitivePeerDependencies:\n - supports-color\n \n@@ -9537,7 +9522,7 @@ packages:\n resolution: {integrity: sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==}\n engines: {node: '>= 10.0.0'}\n dependencies:\n- '@babel/types': 7.21.4\n+ '@babel/types': 7.21.5\n dev: true\n \n /[email protected]:\n@@ -9561,9 +9546,9 @@ packages:\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n cmd-shim: 6.0.1\n- npm-normalize-package-bin: 3.0.0\n+ npm-normalize-package-bin: 3.0.1\n read-cmd-shim: 4.0.0\n- write-file-atomic: 5.0.0\n+ write-file-atomic: 5.0.1\n dev: true\n \n /[email protected]:\n@@ -9637,8 +9622,8 @@ packages:\n engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}\n hasBin: true\n dependencies:\n- caniuse-lite: 1.0.30001481\n- electron-to-chromium: 1.4.369\n+ caniuse-lite: 1.0.30001485\n+ electron-to-chromium: 1.4.385\n node-releases: 2.0.10\n update-browserslist-db: 1.0.11([email protected])\n \n@@ -9703,31 +9688,28 @@ packages:\n promise-inflight: 1.0.1\n rimraf: 3.0.2\n ssri: 9.0.1\n- tar: 6.1.13\n+ tar: 6.1.11\n unique-filename: 2.0.1\n transitivePeerDependencies:\n - bluebird\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-Y/PRQevNSsjAPWykl9aeGz8Pr+OI6BYM9fYDNMvOkuUiG9IhG4LEmaYrZZZvioMUEQ+cBCxT0v8wrnCURccyKA==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-hXpFU+Z3AfVmNuiLve1qxWHMq0RSIt5gjCKAHi/M6DktwFwDdAXAtunl1i4WSKaaVcU9IsRvXFg42jTHigcC6Q==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n '@npmcli/fs': 3.1.0\n- fs-minipass: 3.0.1\n- glob: 9.3.5\n+ fs-minipass: 3.0.2\n+ glob: 10.2.2\n lru-cache: 7.18.3\n- minipass: 4.2.8\n+ minipass: 5.0.0\n minipass-collect: 1.0.2\n minipass-flush: 1.0.5\n minipass-pipeline: 1.2.4\n p-map: 4.0.0\n- promise-inflight: 1.0.1\n- ssri: 10.0.3\n- tar: 6.1.13\n+ ssri: 10.0.4\n+ tar: 6.1.11\n unique-filename: 3.0.0\n- transitivePeerDependencies:\n- - bluebird\n dev: true\n \n /[email protected]:\n@@ -9800,8 +9782,8 @@ packages:\n engines: {node: '>=10'}\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-KCqHwRnaa1InZBtqXzP98LPg0ajCVujMKjqKDhZEthIpAsJl/YEIa3YvXjGXPVqzZVguccuu7ga9KOE1J9rKPQ==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-8aUpZ7sjhlOyiNsg+pgcrTTPUXKh+rg544QYHSvQErljVEKJzvkYkCR/hUFeeVoEfTToUtY9cUKNRC7+c45YkA==}\n \n /[email protected]:\n resolution: {integrity: sha512-ItanGBMrmRV7Py2Z+Xhs7cT+FNt5K0vPL4p9EZ/UX/Mu7hFbkxSjKF2KVtPwX7UYWp7dRKnrTvReflgrItJbdw==}\n@@ -9914,6 +9896,11 @@ packages:\n resolution: {integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==}\n dev: true\n \n+ /[email protected]:\n+ resolution: {integrity: sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==}\n+ engines: {node: '>=8'}\n+ dev: true\n+\n /[email protected]:\n resolution: 
{integrity: sha512-EJUDT7nDVFDvaQgAo2G/PJvxmp1o/c6iXLbswsBbUFXi1Nr+AjA2cKmfbKDMjMvzEe75g3P6JkaDDAKk96A85A==}\n engines: {node: '>= 4.0'}\n@@ -9945,8 +9932,8 @@ packages:\n engines: {node: '>=6'}\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-/eG5sJcvEIwxcdYM86k5tPwn0MUzkX5YY3eImTGpJOZgVe4SdTMY14vQpcxgBzJ0wXwAYrS8E+c3uHeK4JNyzQ==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-4/aL9X3Wh0yiMQlE+eeRhWP6vclO3QRtw1JHKIT0FFUs5FjpFmESqtMvYZ0+lbzBw900b95mS0hohy+qn2VK/g==}\n engines: {node: '>=6'}\n dev: true\n \n@@ -10090,8 +10077,8 @@ packages:\n dot-prop: 5.3.0\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-4UZlZP8Z99MGEY+Ovg/uJxJuvoXuN4M6B3hKaiackiHrgzQFEe3diJi1mf1PNHbFujM7FvLrK2bpgIaImbtZ1A==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-cFhkjbGY1jLFWIV7KegECbfuyYPxSGvgGkdkfM+ibboQDoPwg2FRHm5BSNTOApiauRBzJIQH7qvOJs2sW5ueKQ==}\n dev: true\n \n /[email protected]:\n@@ -10133,8 +10120,8 @@ packages:\n /[email protected]:\n resolution: {integrity: sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==}\n dependencies:\n- '@babel/parser': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/parser': 7.21.8\n+ '@babel/types': 7.21.5\n dev: true\n \n /[email protected]:\n@@ -10289,7 +10276,7 @@ packages:\n /[email protected]:\n resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}\n \n- /[email protected](@types/[email protected])([email protected])([email protected])([email protected]):\n+ /[email protected](@types/[email protected])([email protected])([email protected])([email protected]):\n resolution: {integrity: sha512-NTxV1MFfZDLPiBMjxbHRwSh5LaLcPMwNdCutmnHJCKoVnlvldPWlllonKwrsRJ5pYZBIBGRWWU2tfvzxgeSW5Q==}\n engines: {node: '>=12', npm: '>=6'}\n peerDependencies:\n@@ -10298,9 +10285,9 @@ packages:\n ts-node: '>=10'\n typescript: '>=3'\n dependencies:\n- '@types/node': 18.16.3\n+ '@types/node': 20.1.0\n cosmiconfig: 8.1.3\n- ts-node: 10.9.1(@types/[email protected])([email protected])\n+ ts-node: 10.9.1(@types/[email protected])([email protected])\n typescript: 5.0.4\n dev: true\n \n@@ -10504,7 +10491,7 @@ packages:\n engines: {node: '>=10'}\n dependencies:\n globby: 11.1.0\n- graceful-fs: 4.2.11\n+ graceful-fs: 4.2.10\n is-glob: 4.0.3\n is-path-cwd: 2.2.0\n is-path-inside: 3.0.3\n@@ -10609,6 +10596,9 @@ packages:\n /[email protected]:\n resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==}\n \n+ /[email protected]:\n+ resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}\n+\n /[email protected]:\n resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==}\n dev: true\n@@ -10621,17 +10611,17 @@ packages:\n jake: 10.8.5\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-LfxbHXdA/S+qyoTEA4EbhxGjrxx7WK2h6yb5K2v0UCOufUKX+VZaHbl3svlzZfv9sGseym/g3Ne4DpsgRULmqg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-L9zlje9bIw0h+CwPQumiuVlfMcV4boxRjFIWDcLfFqTZNbkwOExBzfmswytHawObQX4OUhtNv8gIiB21kOurIg==}\n \n- /[email protected]:\n- resolution: {integrity: sha512-V0isWbyLYiXrSCcB4lrSVhS/U56NFGfuqHyc+yEPkyhhvY+h4F85cYGdEiZlXp6XjHT+/CLHmw0ltK54g9lvDw==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-fEYAftYqFhveniWJbEHXjNMWjooFFIuqNj/eEFJkGzycInfBJq/c4E/dew++s6s0YLubxFnjoF2qZiqapLj0gA==}\n engines: {node: '>= 
12.20.55'}\n hasBin: true\n requiresBuild: true\n dependencies:\n '@electron/get': 2.0.2\n- '@types/node': 18.16.0\n+ '@types/node': 18.16.5\n extract-zip: 2.0.1\n transitivePeerDependencies:\n - supports-color\n@@ -10640,6 +10630,9 @@ packages:\n /[email protected]:\n resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}\n \n+ /[email protected]:\n+ resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==}\n+\n /[email protected]:\n resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==}\n engines: {node: '>= 0.8'}\n@@ -10720,26 +10713,26 @@ packages:\n resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}\n engines: {node: '>=10'}\n \n- /[email protected]([email protected]):\n+ /[email protected]([email protected]):\n resolution: {integrity: sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==}\n hasBin: true\n peerDependencies:\n eslint: '>=7.0.0'\n dependencies:\n- eslint: 8.39.0\n+ eslint: 8.40.0\n \n- /[email protected]([email protected]):\n- resolution: {integrity: sha512-32Sx5I9VzO/bqbtslCu3L1GHIPo+QEliwqwjWq+qzbUv76wrkH6ifUEE0EbkuNEn+cHlSIOrg/IJ1PGNN72QZA==}\n- engines: {node: ^14 || ^16 || ^17 || ^18 || ^19 || ^20}\n+ /[email protected]([email protected]):\n+ resolution: {integrity: sha512-Hst7XUfqh28UmPD52oTXmjaRN3d0KrmOZdgtp4h9/VHUJD3Evoo82ZGXi1TtRDWgWhvqDIRI63O49H0eH7NrZQ==}\n+ engines: {node: '>=16'}\n peerDependencies:\n eslint: ^7.0.0 || ^8.0.0\n dependencies:\n- '@es-joy/jsdoccomment': 0.37.1\n+ '@es-joy/jsdoccomment': 0.38.0\n are-docs-informative: 0.0.2\n comment-parser: 1.3.1\n debug: 4.3.4([email protected])\n escape-string-regexp: 4.0.0\n- eslint: 8.39.0\n+ eslint: 8.40.0\n esquery: 1.5.0\n semver: 7.5.0\n spdx-expression-parse: 3.0.1\n@@ -10766,19 +10759,19 @@ packages:\n esrecurse: 4.3.0\n estraverse: 5.3.0\n \n- /[email protected]:\n- resolution: {integrity: sha512-HPpKPUBQcAsZOsHAFwTtIKcYlCje62XB7SEAcxjtmW6TD1WVpkS6i6/hOVtTZIl4zGj/mBqpFVGvaDneik+VoQ==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n \n- /[email protected]:\n- resolution: {integrity: sha512-mwiok6cy7KTW7rBpo05k6+p4YVZByLNjAZ/ACB9DRCu4YDRwjXI01tWHp6KAUWelsBetTxKK/2sHB0vdS8Z2Og==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-bvR+TsP9EHL3TqNtj9sCNJVAFK3fBN8Q7g5waghxyRsPLIMwL73XSKnZFK0hk/O2ANC+iAoq6PWMQ+IfBAJIiQ==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n hasBin: true\n dependencies:\n- '@eslint-community/eslint-utils': 4.4.0([email protected])\n- '@eslint-community/regexpp': 4.5.0\n- '@eslint/eslintrc': 2.0.2\n- '@eslint/js': 8.39.0\n+ '@eslint-community/eslint-utils': 4.4.0([email protected])\n+ '@eslint-community/regexpp': 4.5.1\n+ '@eslint/eslintrc': 2.0.3\n+ '@eslint/js': 8.40.0\n '@humanwhocodes/config-array': 0.11.8\n '@humanwhocodes/module-importer': 1.0.1\n '@nodelib/fs.walk': 1.2.8\n@@ -10789,8 +10782,8 @@ packages:\n doctrine: 3.0.0\n escape-string-regexp: 4.0.0\n eslint-scope: 7.2.0\n- eslint-visitor-keys: 3.4.0\n- espree: 9.5.1\n+ eslint-visitor-keys: 3.4.1\n+ espree: 9.5.2\n esquery: 1.5.0\n esutils: 2.0.3\n fast-deep-equal: 3.1.3\n@@ -10818,13 +10811,13 @@ packages:\n transitivePeerDependencies:\n - 
supports-color\n \n- /[email protected]:\n- resolution: {integrity: sha512-5yxtHSZXRSW5pvv3hAlXM5+/Oswi1AUFqBmbibKb5s6bp3rGIDkyXU6xCoyuuLhijr4SFwPrXRoZjz0AZDN9tg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw==}\n engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}\n dependencies:\n acorn: 8.8.2\n acorn-jsx: 5.3.2([email protected])\n- eslint-visitor-keys: 3.4.0\n+ eslint-visitor-keys: 3.4.1\n \n /[email protected]:\n resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==}\n@@ -11203,7 +11196,7 @@ packages:\n engines: {node: '>=10'}\n dependencies:\n at-least-node: 1.0.0\n- graceful-fs: 4.2.11\n+ graceful-fs: 4.2.10\n jsonfile: 6.1.0\n universalify: 2.0.0\n dev: true\n@@ -11214,11 +11207,11 @@ packages:\n dependencies:\n minipass: 3.3.6\n \n- /[email protected]:\n- resolution: {integrity: sha512-MhaJDcFRTuLidHrIttu0RDGyyXs/IYHVmlcxfLAEFIWjc1vdLAkdwT7Ace2u7DbitWC0toKMl5eJZRYNVreIMw==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-2GAfyfoaCDRrM6jaOS3UsBts8yJ55VioXdWcOL7dK9zdAuKT71+WBA4ifnNYqVjYv+4SsPxjK0JT4yIIn4cA/g==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- minipass: 4.2.8\n+ minipass: 5.0.0\n dev: true\n \n /[email protected]:\n@@ -11262,15 +11255,15 @@ packages:\n wide-align: 1.1.5\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-0s5T5eciEG7Q3ugkxAkFtaDhrrhXsCRivA5y8C9WMHWuI8UlMOJg7+Iwf7Mccii+Dfs3H5jHepU0joPVyQU0Lw==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-CmykPMJGuNan/3S4kZOpvvPYSNqSHANiWnh9XcMU2pSjtBfF0XzZ2p1bFAxTbnFxyBuPxQYHhzwaoOmUdqzvxQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n aproba: 2.0.0\n color-support: 1.1.3\n console-control-strings: 1.1.0\n has-unicode: 2.0.1\n- signal-exit: 3.0.7\n+ signal-exit: 4.0.1\n string-width: 4.2.3\n strip-ansi: 6.0.1\n wide-align: 1.1.5\n@@ -11408,7 +11401,7 @@ packages:\n hasBin: true\n dependencies:\n foreground-child: 3.1.1\n- jackspeak: 2.1.0\n+ jackspeak: 2.2.0\n minimatch: 9.0.0\n minipass: 5.0.0\n path-scurry: 1.7.0\n@@ -11419,7 +11412,7 @@ packages:\n fs.realpath: 1.0.0\n inflight: 1.0.6\n inherits: 2.0.4\n- minimatch: 3.1.2\n+ minimatch: 3.0.5\n once: 1.4.0\n path-is-absolute: 1.0.1\n dev: true\n@@ -11771,11 +11764,11 @@ packages:\n minimatch: 5.1.6\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-ezmQ1Dg2b3jVZh2Dh+ar6Eu2MqNSTkyb32HU2MAQQQX9tKM3q/UQ/9lf03lQ5hW+fOeoMnwxwkleZ0xcNp0/qg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-C7FfFoTA+bI10qfeydT8aZbvr91vAEU+2W5BZUlzPec47oNb07SsOfwYrtxuvOYdUApPP/Qlh4DtAO51Ekk2QA==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- minimatch: 7.4.6\n+ minimatch: 9.0.0\n dev: true\n \n /[email protected]:\n@@ -11859,7 +11852,7 @@ packages:\n mute-stream: 0.0.8\n ora: 5.4.1\n run-async: 2.4.1\n- rxjs: 7.8.0\n+ rxjs: 7.8.1\n string-width: 4.2.3\n strip-ansi: 6.0.1\n through: 2.3.8\n@@ -11880,7 +11873,7 @@ packages:\n mute-stream: 0.0.8\n ora: 5.4.1\n run-async: 2.4.1\n- rxjs: 7.8.0\n+ rxjs: 7.8.1\n string-width: 4.2.3\n strip-ansi: 6.0.1\n through: 2.3.8\n@@ -12113,7 +12106,7 @@ packages:\n resolution: {integrity: sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==}\n engines: {node: '>=8'}\n dependencies:\n- '@babel/core': 7.21.4\n+ '@babel/core': 7.21.8\n '@istanbuljs/schema': 0.1.3\n istanbul-lib-coverage: 3.2.0\n semver: 6.3.0\n@@ 
-12161,11 +12154,11 @@ packages:\n istanbul-lib-report: 3.0.0\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-DiEwVPqsieUzZBNxQ2cxznmFzfg/AMgJUjYw5xl6rSmCxAQXECcbSdwcLM6Ds6T09+SBfSNCGPhYUoQ96P4h7A==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-r5XBrqIJfwRIjRt/Xr5fv9Wh09qyhHfKnYddDlpM+ibRR20qrYActpCAgU6U+d53EOEjzkvxPMVHSlgR7leXrQ==}\n engines: {node: '>=14'}\n dependencies:\n- cliui: 7.0.4\n+ '@isaacs/cliui': 8.0.2\n optionalDependencies:\n '@pkgjs/parseargs': 0.11.0\n \n@@ -12188,7 +12181,7 @@ packages:\n resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}\n engines: {node: '>= 10.13.0'}\n dependencies:\n- '@types/node': 18.16.3\n+ '@types/node': 20.1.0\n merge-stream: 2.0.0\n supports-color: 8.1.1\n \n@@ -12336,12 +12329,12 @@ packages:\n /[email protected]:\n resolution: {integrity: sha512-vmVSD3ubZ8jwkiDKVW5MB5ESI/MUm4trVUw5WnT4j5FV6m81liA2YfQ0l84PlN4qJ3DCeYWFWfprOUoCjzkDhQ==}\n dependencies:\n- ace-builds: 1.18.0\n+ ace-builds: 1.19.0\n ajv: 6.12.6\n javascript-natural-sort: 0.7.1\n jmespath: 0.16.0\n json-source-map: 0.6.1\n- jsonrepair: 3.0.3\n+ jsonrepair: 3.1.0\n mobius1-selectr: 2.4.13\n picomodal: 3.0.0\n vanilla-picker: 2.12.1\n@@ -12365,8 +12358,8 @@ packages:\n engines: {'0': node >= 0.2.0}\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-3Rhz4/UXWBSERKVCPzbfoIQ7CPdKCKkzvcjJdtoRn+aazf1o8dnpGl+PoT58kekOP8EIaA0o/zt70qFqEE3fVA==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-idqReg23J0PVRAADmZMc5xQM3xeOX5bTB6OTyMnzq33IXJXmn9iJuWIEvGmrN80rQf4d7uLTMEDwpzujNcI0Rg==}\n hasBin: true\n dev: true\n \n@@ -12421,17 +12414,17 @@ packages:\n engines: {node: '>=0.10.0'}\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-WJtrvmbmR+6hMB9b5pvsxJzew0lRL6hARgW/My9BM4vYaxwPIA2I0riv3qQu5Zd7lYse7FEqJkTnl9Kn1bXhLA==}\n- engines: {node: ^14.15.0 || >=16.0.0}\n+ /[email protected]:\n+ resolution: {integrity: sha512-W4qrGhcdutkRdHEaDf9eqp7u4JvI+1TwFy5woX6OI8WPe4PYBdxuILAsvhp614fUG41rKSGDKlOh+AWzdSidTg==}\n+ engines: {node: ^14.17.0 || >=16.0.0}\n hasBin: true\n dependencies:\n- '@lerna/child-process': 6.6.1\n- '@lerna/create': 6.6.1\n- '@lerna/legacy-package-management': 6.6.1([email protected])\n+ '@lerna/child-process': 6.6.2\n+ '@lerna/create': 6.6.2\n+ '@lerna/legacy-package-management': 6.6.2([email protected])\n '@npmcli/arborist': 6.2.3\n '@npmcli/run-script': 4.1.7\n- '@nrwl/devkit': 15.9.2([email protected])\n+ '@nrwl/devkit': 15.9.4([email protected])\n '@octokit/plugin-enterprise-rest': 6.0.1\n '@octokit/rest': 19.0.3\n byte-size: 7.0.0\n@@ -12462,8 +12455,8 @@ packages:\n is-ci: 2.0.0\n is-stream: 2.0.0\n js-yaml: 4.1.0\n- libnpmaccess: 6.0.3\n- libnpmpublish: 6.0.4\n+ libnpmaccess: 6.0.4\n+ libnpmpublish: 7.1.4\n load-json-file: 6.2.0\n make-dir: 3.1.0\n minimatch: 3.0.5\n@@ -12471,7 +12464,7 @@ packages:\n node-fetch: 2.6.7\n npm-package-arg: 8.1.1\n npm-packlist: 5.1.1\n- npm-registry-fetch: 14.0.4\n+ npm-registry-fetch: 14.0.5\n npmlog: 6.0.2\n nx: 15.9.3\n p-map: 4.0.0\n@@ -12480,7 +12473,7 @@ packages:\n p-queue: 6.6.2\n p-reduce: 2.1.0\n p-waterfall: 2.1.1\n- pacote: 13.6.2\n+ pacote: 15.1.1\n pify: 5.0.0\n read-cmd-shim: 3.0.0\n read-package-json: 5.0.1\n@@ -12518,8 +12511,8 @@ packages:\n prelude-ls: 1.2.1\n type-check: 0.4.0\n \n- /[email protected]:\n- resolution: {integrity: sha512-4tkfUZprwvih2VUZYMozL7EMKgQ5q9VW2NtRyxWtQWlkLTAWHRklcAvBN49CVqEkhUw7vTX2fNgB5LzgUucgYg==}\n+ /[email protected]:\n+ resolution: {integrity: 
sha512-qZ3wcfIyUoW0+qSFkMBovcTrSGJ3ZeyvpR7d5N9pEYv/kXs8sHP2wiqEIXBKLFrZlmM0kR0RJD7mtfLngtlLag==}\n engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}\n dependencies:\n aproba: 2.0.0\n@@ -12531,17 +12524,19 @@ packages:\n - supports-color\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-lvAEYW8mB8QblL6Q/PI/wMzKNvIrF7Kpujf/4fGS/32a2i3jzUXi04TNyIBcK6dQJ34IgywfaKGh+Jq4HYPFmg==}\n- engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}\n+ /[email protected]:\n+ resolution: {integrity: sha512-mMntrhVwut5prP4rJ228eEbEyvIzLWhqFuY90j5QeXBCTT2pWSMno7Yo2S2qplPUr02zPurGH4heGLZ+wORczg==}\n+ engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- normalize-package-data: 4.0.1\n- npm-package-arg: 9.1.2\n- npm-registry-fetch: 13.3.1\n+ ci-info: 3.8.0\n+ normalize-package-data: 5.0.0\n+ npm-package-arg: 10.1.0\n+ npm-registry-fetch: 14.0.5\n+ proc-log: 3.0.0\n semver: 7.5.0\n- ssri: 9.0.1\n+ sigstore: 1.4.0\n+ ssri: 10.0.4\n transitivePeerDependencies:\n- - bluebird\n - supports-color\n dev: true\n \n@@ -12576,7 +12571,7 @@ packages:\n resolution: {integrity: sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==}\n engines: {node: '>=4'}\n dependencies:\n- graceful-fs: 4.2.11\n+ graceful-fs: 4.2.10\n parse-json: 4.0.0\n pify: 3.0.0\n strip-bom: 3.0.0\n@@ -12586,7 +12581,7 @@ packages:\n resolution: {integrity: sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==}\n engines: {node: '>=8'}\n dependencies:\n- graceful-fs: 4.2.11\n+ graceful-fs: 4.2.10\n parse-json: 5.2.0\n strip-bom: 4.0.0\n type-fest: 0.6.0\n@@ -12780,27 +12775,26 @@ packages:\n - supports-color\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-7ChuOzCb1LzdQZrTy0ky6RsCoMYeM+Fh4cY0+4zsJVhNcH5Q3OJojLY1mGkD0xAhWB29lskECVb6ZopofwjldA==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-rLWS7GCSTcEujjVBs2YqG7Y4643u8ucvCJeSRqiLYhesrDuzeuFIk37xREzAsfQaqzl8b9rNCE4m6J8tvX4Q8w==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n agentkeepalive: 4.3.0\n- cacache: 17.0.5\n+ cacache: 17.1.0\n http-cache-semantics: 4.1.1\n http-proxy-agent: 5.0.0\n https-proxy-agent: 5.0.1\n is-lambda: 1.0.1\n lru-cache: 7.18.3\n- minipass: 4.2.8\n- minipass-fetch: 3.0.2\n+ minipass: 5.0.0\n+ minipass-fetch: 3.0.3\n minipass-flush: 1.0.5\n minipass-pipeline: 1.2.4\n negotiator: 0.6.3\n promise-retry: 2.0.1\n socks-proxy-agent: 7.0.0\n- ssri: 10.0.3\n+ ssri: 10.0.4\n transitivePeerDependencies:\n- - bluebird\n - supports-color\n dev: true\n \n@@ -12847,7 +12841,7 @@ packages:\n redent: 3.0.0\n trim-newlines: 3.0.1\n type-fest: 0.18.1\n- yargs-parser: 20.2.9\n+ yargs-parser: 20.2.4\n dev: true\n \n /[email protected]:\n@@ -12964,13 +12958,6 @@ packages:\n brace-expansion: 2.0.1\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw==}\n- engines: {node: '>=10'}\n- dependencies:\n- brace-expansion: 2.0.1\n- dev: true\n-\n /[email protected]:\n resolution: {integrity: sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA==}\n engines: {node: '>=16 || 14 >=14.17'}\n@@ -13015,11 +13002,11 @@ packages:\n encoding: 0.1.13\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-/ZpF1CQaWYqjbhfFgKNt3azxztEpc/JUPuMkqOgrnMQqcU8CbE409AUdJYTIWryl3PP5CBaTJZT71N49MXP/YA==}\n+ /[email protected]:\n+ resolution: {integrity: 
sha512-n5ITsTkDqYkYJZjcRWzZt9qnZKCT7nKCosJhHoj7S7zD+BP4jVbWs+odsniw5TA3E0sLomhTKOKjF86wf11PuQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- minipass: 4.2.8\n+ minipass: 5.0.0\n minipass-sized: 1.0.3\n minizlib: 2.1.2\n optionalDependencies:\n@@ -13063,6 +13050,7 @@ packages:\n /[email protected]:\n resolution: {integrity: sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==}\n engines: {node: '>=8'}\n+ dev: true\n \n /[email protected]:\n resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==}\n@@ -13156,7 +13144,7 @@ packages:\n array-differ: 3.0.0\n array-union: 2.1.0\n arrify: 2.0.1\n- minimatch: 3.1.2\n+ minimatch: 3.0.5\n dev: true\n \n /[email protected]:\n@@ -13232,13 +13220,13 @@ packages:\n dependencies:\n env-paths: 2.2.1\n glob: 7.2.3\n- graceful-fs: 4.2.11\n+ graceful-fs: 4.2.10\n make-fetch-happen: 10.2.1\n nopt: 6.0.0\n npmlog: 6.0.2\n rimraf: 3.0.2\n semver: 7.5.0\n- tar: 6.1.13\n+ tar: 6.1.11\n which: 2.0.2\n transitivePeerDependencies:\n - bluebird\n@@ -13375,14 +13363,7 @@ packages:\n resolution: {integrity: sha512-Vq0eyEQy+elFpzsKjMss9kxqb9tG3YHg4dsyWuUENuzvSUWe1TCnW/vV9FkhvBk/brEDoDiVd+M1Btosa6ImdQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- npm-normalize-package-bin: 3.0.0\n- dev: true\n-\n- /[email protected]:\n- resolution: {integrity: sha512-65lUsMI8ztHCxFz5ckCEC44DRvEGdZX5usQFriauxHEwt7upv1FKaQEmAtU0YnOAdwuNWCmk64xYiQABNrEyLA==}\n- engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}\n- dependencies:\n- semver: 7.5.0\n+ npm-normalize-package-bin: 3.0.1\n dev: true\n \n /[email protected]:\n@@ -13396,13 +13377,8 @@ packages:\n resolution: {integrity: sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==}\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-awzfKUO7v0FscrSpRoogyNm0sajikhBWpU0QMrW09AMi9n1PoKU6WaIqUzuJSQnpciZZmJ/jMZ2Egfmb/9LiWQ==}\n- engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}\n- dev: true\n-\n- /[email protected]:\n- resolution: {integrity: sha512-g+DPQSkusnk7HYXr75NtzkIP4+N81i3RPsGFidF3DzHd9MT9wWngmqoeg/fnHFz5MNdtG4w03s+QnhewSLTT2Q==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dev: true\n \n@@ -13450,17 +13426,7 @@ packages:\n resolution: {integrity: sha512-d6RGEuRrNS5/N84iglPivjaJPxhDbZmlbTwTDX2IbcRHG5bZCdtysYMhwiPvcF4GisXHGn7xsxv+GQ7T/02M5Q==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- ignore-walk: 6.0.2\n- dev: true\n-\n- /[email protected]:\n- resolution: {integrity: sha512-gk37SyRmlIjvTfcYl6RzDbSmS9Y4TOBXfsPnoYqTHARNgWbyDiCSMLUpmALDj4jjcTZpURiEfsSHJj9k7EV4Rw==}\n- engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}\n- dependencies:\n- npm-install-checks: 5.0.0\n- npm-normalize-package-bin: 2.0.0\n- npm-package-arg: 9.1.2\n- semver: 7.5.0\n+ ignore-walk: 6.0.3\n dev: true\n \n /[email protected]:\n@@ -13468,7 +13434,7 @@ packages:\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n npm-install-checks: 6.1.1\n- npm-normalize-package-bin: 3.0.0\n+ npm-normalize-package-bin: 3.0.1\n npm-package-arg: 10.1.0\n semver: 7.5.0\n dev: true\n@@ -13493,31 +13459,29 @@ packages:\n resolution: {integrity: sha512-YaeRbVNpnWvsGOjX2wk5s85XJ7l1qQBGAp724h8e2CZFFhMSuw9enom7K1mWVUtvXO1uUSFIAPofQK0pPN0ZcA==}\n engines: {node: ^14.17.0 || 
^16.13.0 || >=18.0.0}\n dependencies:\n- make-fetch-happen: 11.1.0\n+ make-fetch-happen: 11.1.1\n minipass: 4.2.8\n- minipass-fetch: 3.0.2\n+ minipass-fetch: 3.0.3\n minipass-json-stream: 1.0.1\n minizlib: 2.1.2\n npm-package-arg: 10.1.0\n proc-log: 3.0.0\n transitivePeerDependencies:\n- - bluebird\n - supports-color\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-pMS2DRkwg+M44ct65zrN/Cr9IHK1+n6weuefAo6Er4lc+/8YBCU0Czq04H3ZiSigluh7pb2rMM5JpgcytctB+Q==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-kIDMIo4aBm6xg7jOttupWZamsZRkAqMqwqqbVXnUqstY5+tapvv6bkH/qMR76jdgV+YljEUCyWx3hRYMrJiAgA==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- make-fetch-happen: 11.1.0\n- minipass: 4.2.8\n- minipass-fetch: 3.0.2\n+ make-fetch-happen: 11.1.1\n+ minipass: 5.0.0\n+ minipass-fetch: 3.0.3\n minipass-json-stream: 1.0.1\n minizlib: 2.1.2\n npm-package-arg: 10.1.0\n proc-log: 3.0.0\n transitivePeerDependencies:\n- - bluebird\n - supports-color\n dev: true\n \n@@ -13559,7 +13523,7 @@ packages:\n dependencies:\n are-we-there-yet: 4.0.0\n console-control-strings: 1.1.0\n- gauge: 5.0.0\n+ gauge: 5.0.1\n set-blocking: 2.0.0\n dev: true\n \n@@ -13601,9 +13565,9 @@ packages:\n '@nrwl/tao': 15.9.3\n '@parcel/watcher': 2.0.4\n '@yarnpkg/lockfile': 1.1.0\n- '@yarnpkg/parsers': 3.0.0-rc.42\n+ '@yarnpkg/parsers': 3.0.0-rc.43\n '@zkochan/js-yaml': 0.0.6\n- axios: 1.3.6\n+ axios: 1.4.0\n chalk: 4.1.2\n cli-cursor: 3.1.0\n cli-spinners: 2.6.1\n@@ -13630,7 +13594,7 @@ packages:\n tsconfig-paths: 4.2.0\n tslib: 2.5.0\n v8-compile-cache: 2.3.0\n- yargs: 17.7.1\n+ yargs: 17.7.2\n yargs-parser: 21.1.1\n optionalDependencies:\n '@nrwl/nx-darwin-arm64': 15.9.3\n@@ -13646,8 +13610,8 @@ packages:\n - debug\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-Q76wS7oWsbxi7lxyhI9d10MOvNhbxjDU0J40gdg8DhIi8QWJ5q5SfkD5+Vn1mYy9AA7zyKUF8CGh2kYJMpWPpA==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-wm3g4IT7PTCcyX+n1WuuFVa6TQTfbPGS6kJLS62LpALFmN85EoiIGjClEZjTO6MEJHKpL/BjXscsLXnkboEytg==}\n hasBin: true\n requiresBuild: true\n peerDependencies:\n@@ -13659,12 +13623,12 @@ packages:\n '@swc/core':\n optional: true\n dependencies:\n- '@nrwl/tao': 16.0.3\n+ '@nrwl/tao': 16.1.1\n '@parcel/watcher': 2.0.4\n '@yarnpkg/lockfile': 1.1.0\n- '@yarnpkg/parsers': 3.0.0-rc.42\n+ '@yarnpkg/parsers': 3.0.0-rc.43\n '@zkochan/js-yaml': 0.0.6\n- axios: 1.3.6\n+ axios: 1.4.0\n chalk: 4.1.2\n cli-cursor: 3.1.0\n cli-spinners: 2.6.1\n@@ -13691,18 +13655,18 @@ packages:\n tsconfig-paths: 4.2.0\n tslib: 2.5.0\n v8-compile-cache: 2.3.0\n- yargs: 17.7.1\n+ yargs: 17.7.2\n yargs-parser: 21.1.1\n optionalDependencies:\n- '@nx/nx-darwin-arm64': 16.0.3\n- '@nx/nx-darwin-x64': 16.0.3\n- '@nx/nx-linux-arm-gnueabihf': 16.0.3\n- '@nx/nx-linux-arm64-gnu': 16.0.3\n- '@nx/nx-linux-arm64-musl': 16.0.3\n- '@nx/nx-linux-x64-gnu': 16.0.3\n- '@nx/nx-linux-x64-musl': 16.0.3\n- '@nx/nx-win32-arm64-msvc': 16.0.3\n- '@nx/nx-win32-x64-msvc': 16.0.3\n+ '@nx/nx-darwin-arm64': 16.1.1\n+ '@nx/nx-darwin-x64': 16.1.1\n+ '@nx/nx-linux-arm-gnueabihf': 16.1.1\n+ '@nx/nx-linux-arm64-gnu': 16.1.1\n+ '@nx/nx-linux-arm64-musl': 16.1.1\n+ '@nx/nx-linux-x64-gnu': 16.1.1\n+ '@nx/nx-linux-x64-musl': 16.1.1\n+ '@nx/nx-win32-arm64-msvc': 16.1.1\n+ '@nx/nx-win32-x64-msvc': 16.1.1\n transitivePeerDependencies:\n - debug\n dev: true\n@@ -13811,7 +13775,7 @@ packages:\n bl: 4.1.0\n chalk: 4.1.2\n cli-cursor: 3.1.0\n- cli-spinners: 2.8.0\n+ cli-spinners: 2.9.0\n is-interactive: 1.0.0\n is-unicode-supported: 0.1.0\n 
log-symbols: 4.1.0\n@@ -13956,60 +13920,29 @@ packages:\n release-zalgo: 1.0.0\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-Gu8fU3GsvOPkak2CkbojR7vjs3k3P9cA6uazKTHdsdV0gpCEQq2opelnEv30KRQWgVzP5Vd/5umjcedma3MKtg==}\n- engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}\n- hasBin: true\n- dependencies:\n- '@npmcli/git': 3.0.2\n- '@npmcli/installed-package-contents': 1.0.7\n- '@npmcli/promise-spawn': 3.0.0\n- '@npmcli/run-script': 4.1.7\n- cacache: 16.1.3\n- chownr: 2.0.0\n- fs-minipass: 2.1.0\n- infer-owner: 1.0.4\n- minipass: 3.3.6\n- mkdirp: 1.0.4\n- npm-package-arg: 9.1.2\n- npm-packlist: 5.1.1\n- npm-pick-manifest: 7.0.2\n- npm-registry-fetch: 13.3.1\n- proc-log: 2.0.1\n- promise-retry: 2.0.1\n- read-package-json: 5.0.1\n- read-package-json-fast: 2.0.3\n- rimraf: 3.0.2\n- ssri: 9.0.1\n- tar: 6.1.13\n- transitivePeerDependencies:\n- - bluebird\n- - supports-color\n- dev: true\n-\n- /[email protected]:\n- resolution: {integrity: sha512-EAGJrMiIjBTBB6tWGrx9hFJTOo14B3HSAoa/W9SawFEBhUqjxN7qqaFlGVF9jfY/mIri8Mb2xafmkRgWxYXxIQ==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-eeqEe77QrA6auZxNHIp+1TzHQ0HBKf5V6c8zcaYZ134EJe1lCi+fjXATkNiEEfbG+e50nu02GLvUtmZcGOYabQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n hasBin: true\n dependencies:\n '@npmcli/git': 4.0.4\n '@npmcli/installed-package-contents': 2.0.2\n '@npmcli/promise-spawn': 6.0.2\n- '@npmcli/run-script': 6.0.0\n- cacache: 17.0.5\n- fs-minipass: 3.0.1\n+ '@npmcli/run-script': 6.0.1\n+ cacache: 17.1.0\n+ fs-minipass: 3.0.2\n minipass: 4.2.8\n npm-package-arg: 10.1.0\n npm-packlist: 7.0.4\n npm-pick-manifest: 8.0.1\n- npm-registry-fetch: 14.0.4\n+ npm-registry-fetch: 14.0.5\n proc-log: 3.0.0\n promise-retry: 2.0.1\n- read-package-json: 6.0.1\n+ read-package-json: 6.0.3\n read-package-json-fast: 3.0.2\n- sigstore: 1.3.2\n- ssri: 10.0.3\n- tar: 6.1.13\n+ sigstore: 1.4.0\n+ ssri: 10.0.4\n+ tar: 6.1.11\n transitivePeerDependencies:\n - bluebird\n - supports-color\n@@ -14190,8 +14123,8 @@ packages:\n deprecated: You can find the new Popper v2 at @popperjs/core, this package is dedicated to the legacy v1\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-zbARubNdogI9j7WY4nQJBiNqQf3sLS3wCP4WfOidu+p28LofJqDH1tcXypGrcmMHhDk2t9wGhCsYe/+szLTy1g==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-NdxGCAZdRrwVI1sy59+Wzrh+pMMHxapGnpfenDVlMEXoOcvt4pGE0JLK9YY2F5dLxcFYA/YbVQKhcGU+FtSYQg==}\n engines: {node: '>=4'}\n dependencies:\n cssesc: 3.0.0\n@@ -14602,7 +14535,7 @@ packages:\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n json-parse-even-better-errors: 3.0.0\n- npm-normalize-package-bin: 3.0.0\n+ npm-normalize-package-bin: 3.0.1\n dev: true\n \n /[email protected]:\n@@ -14615,14 +14548,14 @@ packages:\n npm-normalize-package-bin: 1.0.1\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-AaHqXxfAVa+fNL07x8iAghfKOds/XXsu7zoouIVsbm7PEbQ3nMWXlvjcbrNLjElnUHWQtAo4QEa0RXuvD4XlpA==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-4QbpReW4kxFgeBQ0vPAqh2y8sXEB3D4t3jsXbJKIhBiF80KT6XRo45reqwtftju5J6ru1ax06A2Gb/wM1qCOEQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- glob: 9.3.5\n+ glob: 10.2.2\n json-parse-even-better-errors: 3.0.0\n normalize-package-data: 5.0.0\n- npm-normalize-package-bin: 3.0.0\n+ npm-normalize-package-bin: 3.0.1\n dev: true\n \n /[email protected]:\n@@ -14756,7 +14689,7 @@ packages:\n /[email protected]:\n resolution: {integrity: 
sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg==}\n dependencies:\n- '@babel/runtime': 7.21.0\n+ '@babel/runtime': 7.21.5\n \n /[email protected]:\n resolution: {integrity: sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==}\n@@ -14923,15 +14856,8 @@ packages:\n dependencies:\n queue-microtask: 1.2.3\n \n- /[email protected]:\n- resolution: {integrity: sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==}\n- engines: {npm: '>=2.0.0'}\n- dependencies:\n- tslib: 1.14.1\n- dev: true\n-\n- /[email protected]:\n- resolution: {integrity: sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==}\n dependencies:\n tslib: 2.5.0\n dev: true\n@@ -14950,8 +14876,8 @@ packages:\n /[email protected]:\n resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}\n \n- /[email protected]:\n- resolution: {integrity: sha512-Q4USplo4pLYgCi+XlipZCWUQz5pkg/ruSSgJ0WRDSb/+3z9tXUOkQ7QPYn4XrhZKYAK4HlpaQecRwKLJX6+DBg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-NHpxIzN29MXvWiuswfc1W3I0N8SXBd8UR26WntmDlRYf0bSADnwnOjsyMZ3lMezSlArD33Vs3YFhp7dWvL770A==}\n engines: {node: '>=14.0.0'}\n hasBin: true\n dependencies:\n@@ -15116,8 +15042,8 @@ packages:\n resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}\n engines: {node: '>=8'}\n \n- /[email protected]:\n- resolution: {integrity: sha512-+Jz4nBkCBe0mEDqo1eKRcCdjRtrCjozmcbTUjbPTX7OOJfEbTZzlUWlZtGe3Gb5oV1/jnojhG//YZc3rs9zSEw==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-ltSZlSLOuSY0M0Y75KA+ieRaZ0Trf5Wl3gutE7jzLuIcWxLp5i/uEnLoQWNvgKXQ5OMpGkJnVMRLAuzjc0LJ2A==}\n dependencies:\n ansi-sequence-parser: 1.1.0\n jsonc-parser: 3.2.0\n@@ -15140,16 +15066,15 @@ packages:\n resolution: {integrity: sha512-uUWsN4aOxJAS8KOuf3QMyFtgm1pkb6I+KRZbRF/ghdf5T7sM+B1lLLzPDxswUjkmHyxQAVzEgG35E3NzDM9GVw==}\n engines: {node: '>=14'}\n \n- /[email protected]:\n- resolution: {integrity: sha512-0KT1DjpVB11FK15ep7BIsZQV6j1jBm4SnXIInbBCRvql6II39IKONOMO+j036sGsArU/+2xqa1NDJwJkic0neA==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-N7TRpSbFjY/TrFDg6yGAQSYBrQ5s6qmPiq4pD6fkv1LoyfMsLG0NwZWG2s5q+uttLHgyVyTa0Rogx2P78rN8kQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n hasBin: true\n dependencies:\n '@sigstore/protobuf-specs': 0.1.0\n- make-fetch-happen: 11.1.0\n- tuf-js: 1.1.4\n+ make-fetch-happen: 11.1.1\n+ tuf-js: 1.1.5\n transitivePeerDependencies:\n- - bluebird\n - supports-color\n dev: true\n \n@@ -15302,11 +15227,11 @@ packages:\n dev: true\n optional: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-lJtX/BFPI/VEtxZmLfeh7pzisIs6micwZ3eruD3+ds9aPsXKlYpwDS2Q7omD6WC42WO9+bnUSzlMmfv8uK8meg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-12+IR2CB2C28MMAw0Ncqwj5QbTcs0nGIhgJzYWzDkb21vWmfNI83KS4f3Ci6GI98WreIfG7o9UXp3C0qbpA8nQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- minipass: 4.2.8\n+ minipass: 5.0.0\n dev: true\n \n /[email protected]:\n@@ -15333,6 +15258,14 @@ packages:\n is-fullwidth-code-point: 3.0.0\n strip-ansi: 6.0.1\n \n+ /[email protected]:\n+ resolution: {integrity: 
sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==}\n+ engines: {node: '>=12'}\n+ dependencies:\n+ eastasianwidth: 0.2.0\n+ emoji-regex: 9.2.2\n+ strip-ansi: 7.0.1\n+\n /[email protected]:\n resolution: {integrity: sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==}\n \n@@ -15359,6 +15292,12 @@ packages:\n dependencies:\n ansi-regex: 5.0.1\n \n+ /[email protected]:\n+ resolution: {integrity: sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==}\n+ engines: {node: '>=12'}\n+ dependencies:\n+ ansi-regex: 6.0.1\n+\n /[email protected]:\n resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==}\n engines: {node: '>=4'}\n@@ -15484,13 +15423,13 @@ packages:\n yallist: 4.0.0\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-piERznXu0U7/pW7cdSn7hjqySIVTYT6F76icmFk7ptU7dDYlXTm5r9A6K04R2vU3olYgoKeo1Cg3eeu5nhftAw==}\n engines: {node: '>=10'}\n dependencies:\n chownr: 2.0.0\n fs-minipass: 2.1.0\n- minipass: 4.2.8\n+ minipass: 5.0.0\n minizlib: 2.1.2\n mkdirp: 1.0.4\n yallist: 4.0.0\n@@ -15516,7 +15455,7 @@ packages:\n unique-string: 2.0.0\n dev: true\n \n- /[email protected]([email protected]):\n+ /[email protected]([email protected]):\n resolution: {integrity: sha512-AfKwIktyP7Cu50xNjXF/6Qb5lBNzYaWpU6YfoX3uZicTx0zTy0stDDCsvjDapKsSDvOeWo5MEq4TmdBy2cNoHw==}\n engines: {node: '>= 10.13.0'}\n peerDependencies:\n@@ -15537,7 +15476,7 @@ packages:\n schema-utils: 3.1.2\n serialize-javascript: 6.0.1\n terser: 5.17.1\n- webpack: 5.80.0([email protected])\n+ webpack: 5.82.0([email protected])\n \n /[email protected]:\n resolution: {integrity: sha512-hVl35zClmpisy6oaoKALOpS0rDYLxRFLHhRuDlEGTKey9qHjS1w9GMORjuwIMt70Wan4lwsLYyWDVnWgF+KUEw==}\n@@ -15687,7 +15626,7 @@ packages:\n typescript: 4.9.5\n dev: true\n \n- /[email protected](@types/[email protected])([email protected]):\n+ /[email protected](@types/[email protected])([email protected]):\n resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}\n hasBin: true\n peerDependencies:\n@@ -15706,7 +15645,7 @@ packages:\n '@tsconfig/node12': 1.0.11\n '@tsconfig/node14': 1.0.3\n '@tsconfig/node16': 1.0.3\n- '@types/node': 16.18.24\n+ '@types/node': 16.18.26\n acorn: 8.8.2\n acorn-walk: 8.2.0\n arg: 4.1.3\n@@ -15718,38 +15657,7 @@ packages:\n yn: 3.1.1\n dev: true\n \n- /[email protected](@types/[email protected])([email protected]):\n- resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}\n- hasBin: true\n- peerDependencies:\n- '@swc/core': '>=1.2.50'\n- '@swc/wasm': '>=1.2.50'\n- '@types/node': '*'\n- typescript: '>=2.7'\n- peerDependenciesMeta:\n- '@swc/core':\n- optional: true\n- '@swc/wasm':\n- optional: true\n- dependencies:\n- '@cspotcode/source-map-support': 0.8.1\n- '@tsconfig/node10': 1.0.9\n- '@tsconfig/node12': 1.0.11\n- '@tsconfig/node14': 1.0.3\n- '@tsconfig/node16': 1.0.3\n- '@types/node': 18.16.0\n- acorn: 8.8.2\n- acorn-walk: 8.2.0\n- arg: 4.1.3\n- create-require: 1.1.1\n- diff: 4.0.2\n- make-error: 1.3.6\n- typescript: 5.0.4\n- v8-compile-cache-lib: 3.0.1\n- yn: 3.1.1\n- dev: true\n-\n- /[email protected](@types/[email protected])([email protected]):\n+ /[email 
protected](@types/[email protected])([email protected]):\n resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}\n hasBin: true\n peerDependencies:\n@@ -15768,7 +15676,7 @@ packages:\n '@tsconfig/node12': 1.0.11\n '@tsconfig/node14': 1.0.3\n '@tsconfig/node16': 1.0.3\n- '@types/node': 18.16.3\n+ '@types/node': 20.1.0\n acorn: 8.8.2\n acorn-walk: 8.2.0\n arg: 4.1.3\n@@ -15804,14 +15712,13 @@ packages:\n tslib: 1.14.1\n typescript: 5.0.4\n \n- /[email protected]:\n- resolution: {integrity: sha512-Lw2JRM3HTYhEtQJM2Th3aNCPbnXirtWMl065BawwmM2pX6XStH/ZO9e8T2hh0zk/HUa+1i6j+Lv6eDitKTau6A==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-inqodgxdsmuxrtQVbu6tPNgRKWD1Boy3VB6GO7KczJZpAHiTukwhSzXUSzvDcw5pE2Jo8ua+e1ykpHv7VdPVlQ==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n- '@tufjs/models': 1.0.3\n- make-fetch-happen: 11.1.0\n+ '@tufjs/models': 1.0.4\n+ make-fetch-happen: 11.1.1\n transitivePeerDependencies:\n- - bluebird\n - supports-color\n dev: true\n \n@@ -15938,7 +15845,7 @@ packages:\n lunr: 2.3.9\n marked: 4.3.0\n minimatch: 9.0.0\n- shiki: 0.14.1\n+ shiki: 0.14.2\n typescript: 5.0.4\n dev: true\n \n@@ -15947,13 +15854,13 @@ packages:\n hasBin: true\n dependencies:\n '@types/json-schema': 7.0.11\n- '@types/node': 16.18.24\n+ '@types/node': 16.18.26\n glob: 7.2.3\n path-equal: 1.2.5\n safe-stable-stringify: 2.4.3\n- ts-node: 10.9.1(@types/[email protected])([email protected])\n+ ts-node: 10.9.1(@types/[email protected])([email protected])\n typescript: 4.9.5\n- yargs: 17.7.1\n+ yargs: 17.7.2\n transitivePeerDependencies:\n - '@swc/core'\n - '@swc/wasm'\n@@ -16231,7 +16138,7 @@ packages:\n - bufferutil\n - utf-8-validate\n \n- /[email protected]([email protected])([email protected]):\n+ /[email protected]([email protected])([email protected]):\n resolution: {integrity: sha512-4y3W5Dawri5+8dXm3+diW6Mn1Ya+Dei6eEVAdIduAmYNLzv1koKVAqsfgrrc9P2mhrYHQphx5htnGkcNwtubyQ==}\n engines: {node: '>=14.15.0'}\n hasBin: true\n@@ -16249,9 +16156,9 @@ packages:\n optional: true\n dependencies:\n '@discoveryjs/json-ext': 0.5.7\n- '@webpack-cli/configtest': 2.0.1([email protected])([email protected])\n- '@webpack-cli/info': 2.0.1([email protected])([email protected])\n- '@webpack-cli/serve': 2.0.2([email protected])([email protected])\n+ '@webpack-cli/configtest': 2.0.1([email protected])([email protected])\n+ '@webpack-cli/info': 2.0.1([email protected])([email protected])\n+ '@webpack-cli/serve': 2.0.2([email protected])([email protected])\n colorette: 2.0.20\n commander: 10.0.1\n cross-spawn: 7.0.3\n@@ -16260,7 +16167,7 @@ packages:\n import-local: 3.1.0\n interpret: 3.1.1\n rechoir: 0.8.0\n- webpack: 5.80.0([email protected])\n+ webpack: 5.82.0([email protected])\n webpack-bundle-analyzer: 4.8.0\n webpack-merge: 5.8.0\n \n@@ -16269,14 +16176,14 @@ packages:\n engines: {node: '>=10.0.0'}\n dependencies:\n clone-deep: 4.0.1\n- wildcard: 2.0.0\n+ wildcard: 2.0.1\n \n /[email protected]:\n resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==}\n engines: {node: '>=10.13.0'}\n \n- /[email protected]([email protected]):\n- resolution: {integrity: sha512-OIMiq37XK1rWO8mH9ssfFKZsXg4n6klTEDL7S8/HqbAOBBaiy8ABvXvz0dDCXeEF9gqwxSvVk611zFPjS8hJxA==}\n+ /[email protected]([email protected]):\n+ resolution: {integrity: sha512-iGNA2fHhnDcV1bONdUu554eZx+XeldsaeQ8T67H6KKHl2nUSwX8Zm7cmzOA46ox/X1ARxf7Bjv8wQ/HsB5fxBg==}\n engines: {node: '>=10.13.0'}\n hasBin: true\n 
peerDependencies:\n@@ -16306,9 +16213,9 @@ packages:\n neo-async: 2.6.2\n schema-utils: 3.1.2\n tapable: 2.2.1\n- terser-webpack-plugin: 5.3.7([email protected])\n+ terser-webpack-plugin: 5.3.7([email protected])\n watchpack: 2.4.0\n- webpack-cli: 5.0.2([email protected])([email protected])\n+ webpack-cli: 5.0.2([email protected])([email protected])\n webpack-sources: 3.2.3\n transitivePeerDependencies:\n - '@swc/core'\n@@ -16356,8 +16263,8 @@ packages:\n dependencies:\n isexe: 2.0.0\n \n- /[email protected]:\n- resolution: {integrity: sha512-nla//68K9NU6yRiwDY/Q8aU6siKlSs64aEC7+IV56QoAuyQT2ovsJcgGYGyqMOmI/CGN1BOR6mM5EN0FBO+zyQ==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n hasBin: true\n dependencies:\n@@ -16369,8 +16276,8 @@ packages:\n dependencies:\n string-width: 4.2.3\n \n- /[email protected]:\n- resolution: {integrity: sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==}\n \n /[email protected]:\n resolution: {integrity: sha512-1pTPQDKTdd61ozlKGNCjhNRd+KPmgLSGa3mZTHoOliaGcESD8G1PXhh7c1fgiPjVbNVfgy2Faw4BI8/m0cC8Mg==}\n@@ -16388,8 +16295,8 @@ packages:\n resolution: {integrity: sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==}\n engines: {node: '>= 10.0.0'}\n dependencies:\n- '@babel/parser': 7.21.4\n- '@babel/types': 7.21.4\n+ '@babel/parser': 7.21.8\n+ '@babel/types': 7.21.5\n assert-never: 1.2.1\n babel-walk: 3.0.0-canary-5\n dev: true\n@@ -16428,13 +16335,21 @@ packages:\n string-width: 4.2.3\n strip-ansi: 6.0.1\n \n+ /[email protected]:\n+ resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==}\n+ engines: {node: '>=12'}\n+ dependencies:\n+ ansi-styles: 6.2.1\n+ string-width: 5.1.2\n+ strip-ansi: 7.0.1\n+\n /[email protected]:\n resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}\n \n /[email protected]:\n resolution: {integrity: sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==}\n dependencies:\n- graceful-fs: 4.2.11\n+ graceful-fs: 4.2.10\n imurmurhash: 0.1.4\n signal-exit: 3.0.7\n dev: true\n@@ -16456,12 +16371,12 @@ packages:\n signal-exit: 3.0.7\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-R7NYMnHSlV42K54lwY9lvW6MnSm1HSJqZL3xiSgi9E7//FYaI74r2G0rd+/X6VAMkHEdzxQaU5HUOXWUz5kA/w==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==}\n engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}\n dependencies:\n imurmurhash: 0.1.4\n- signal-exit: 3.0.7\n+ signal-exit: 4.0.1\n dev: true\n \n /[email protected]:\n@@ -16469,7 +16384,7 @@ packages:\n engines: {node: '>=6'}\n dependencies:\n detect-indent: 5.0.0\n- graceful-fs: 4.2.11\n+ graceful-fs: 4.2.10\n make-dir: 2.1.0\n pify: 4.0.1\n sort-keys: 2.0.0\n@@ -16604,8 +16519,8 @@ packages:\n y18n: 5.0.8\n yargs-parser: 20.2.9\n \n- /[email protected]:\n- resolution: {integrity: sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==}\n+ /[email protected]:\n+ resolution: {integrity: 
sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==}\n engines: {node: '>=12'}\n dependencies:\n cliui: 8.0.1\n"}
chore(clickhouse): use anonymous function for clickhouse countif due to invalid sqlglot rewrite
c736e92e002ee8ecca9567636843f601a5f0a148
chore
https://github.com/rohankumardubey/ibis/commit/c736e92e002ee8ecca9567636843f601a5f0a148
use anonymous function for clickhouse countif due to invalid sqlglot rewrite
{"values.py": "@@ -138,10 +138,17 @@ def _array_slice_op(op, *, arg, start, stop, **_):\n return F.arraySlice(arg, start_correct)\n \n \n+@translate_val.register(ops.Count)\n+def _count(op, *, arg, where, **_):\n+ if where is not None:\n+ return sg.exp.Anonymous(this=\"countIf\", expressions=[arg, where])\n+ return sg.exp.Count(this=arg)\n+\n+\n @translate_val.register(ops.CountStar)\n def _count_star(op, *, where, **_):\n if where is not None:\n- return F.countIf(where)\n+ return sg.exp.Anonymous(this=\"countIf\", expressions=[where])\n return sg.exp.Count(this=STAR)\n \n \n@@ -744,7 +751,6 @@ _simple_ops = {\n ops.ArgMin: \"argMin\",\n ops.ArgMax: \"argMax\",\n ops.ArrayCollect: \"groupArray\",\n- ops.Count: \"count\",\n ops.CountDistinct: \"uniq\",\n ops.First: \"any\",\n ops.Last: \"anyLast\",\n"}
feat(migrations): allow subscribing to migrator events Closes #5763
18a425c091af0535d6b43dfb797edade58822fd2
feat
https://github.com/mikro-orm/mikro-orm/commit/18a425c091af0535d6b43dfb797edade58822fd2
allow subscribing to migrator events Closes #5763
{"index.ts": "@@ -4,8 +4,8 @@\n */\n /* istanbul ignore file */\n export {\n- Constructor, ConnectionType, Dictionary, PrimaryKeyProp, Primary, IPrimaryKey, ObjectQuery, FilterQuery, IWrappedEntity, EntityName, EntityData, Highlighter,\n- AnyEntity, EntityClass, EntityProperty, EntityMetadata, QBFilterQuery, PopulateOptions, Populate, Loaded, New, LoadedReference, LoadedCollection, IMigrator, IMigrationGenerator,\n+ Constructor, ConnectionType, Dictionary, PrimaryKeyProp, Primary, IPrimaryKey, ObjectQuery, FilterQuery, IWrappedEntity, EntityName, EntityData, Highlighter, MaybePromise,\n+ AnyEntity, EntityClass, EntityProperty, EntityMetadata, QBFilterQuery, PopulateOptions, Populate, Loaded, New, LoadedReference, LoadedCollection, IMigrator, IMigrationGenerator, MigratorEvent,\n GetRepository, EntityRepositoryType, MigrationObject, DeepPartial, PrimaryProperty, Cast, IsUnknown, EntityDictionary, EntityDTO, MigrationDiff, GenerateOptions, FilterObject,\n IEntityGenerator, ISeedManager, EntityClassGroup, OptionalProps, EagerProps, HiddenProps, RequiredEntityData, CheckCallback, SimpleColumnMeta, Rel, Ref, ScalarRef, EntityRef, ISchemaGenerator,\n UmzugMigration, MigrateOptions, MigrationResult, MigrationRow, EntityKey, EntityValue, FilterKey, Opt, EntityType, FromEntityType, Selected, IsSubset, NoInfer,\n", "typings.ts": "@@ -919,12 +919,24 @@ export interface IMigrator {\n */\n down(options?: string | string[] | MigrateOptions): Promise<UmzugMigration[]>;\n \n+ /**\n+ * Registers event handler.\n+ */\n+ on(event: MigratorEvent, listener: (event: UmzugMigration) => MaybePromise<void>): IMigrator;\n+\n+ /**\n+ * Removes event handler.\n+ */\n+ off(event: MigratorEvent, listener: (event: UmzugMigration) => MaybePromise<void>): IMigrator;\n+\n /**\n * @internal\n */\n getStorage(): IMigratorStorage;\n }\n \n+export type MigratorEvent = 'migrating' | 'migrated' | 'reverting' | 'reverted';\n+\n export interface MigrationDiff {\n up: string[];\n down: string[];\n", "Migrator.ts": "@@ -14,6 +14,8 @@ import {\n type Transaction,\n type Configuration,\n type MigrationsOptions,\n+ type MigratorEvent,\n+ type MaybePromise,\n } from '@mikro-orm/core';\n import {\n DatabaseSchema,\n@@ -117,6 +119,22 @@ export class Migrator implements IMigrator {\n };\n }\n \n+ /**\n+ * @inheritDoc\n+ */\n+ on(eventName: MigratorEvent, listener: (event: UmzugMigration) => MaybePromise<void>): this {\n+ this.umzug.on(eventName, listener);\n+ return this;\n+ }\n+\n+ /**\n+ * @inheritDoc\n+ */\n+ off(eventName: MigratorEvent, listener: (event: UmzugMigration) => MaybePromise<void>): this {\n+ this.umzug.off(eventName, listener);\n+ return this;\n+ }\n+\n private createUmzug(): void {\n this.runner = new MigrationRunner(this.driver, this.options, this.config);\n this.storage = new MigrationStorage(this.driver, this.options);\n", "Migrator.mongo.test.ts": "@@ -1,6 +1,6 @@\n (global as any).process.env.FORCE_COLOR = 0;\n import { Umzug } from 'umzug';\n-import { MikroORM } from '@mikro-orm/core';\n+import type { MikroORM, UmzugMigration } from '@mikro-orm/core';\n import { Migration, Migrator } from '@mikro-orm/migrations-mongodb';\n import { MongoDriver } from '@mikro-orm/mongodb';\n import { remove } from 'fs-extra';\n@@ -198,13 +198,19 @@ describe('Migrator (mongo)', () => {\n \n const mock = mockLogger(orm, ['query']);\n \n+ const migrated: unknown[] = [];\n+ const migratedHandler = (e: UmzugMigration) => { migrated.push(e); };\n+ migrator.on('migrated', migratedHandler);\n+\n await migrator.up(migration.fileName);\n- 
await migrator.down(migration.fileName.replace('Migration', '').replace('.ts', ''));\n+ await migrator.down(migration.fileName);\n await migrator.up({ migrations: [migration.fileName] });\n await migrator.down({ from: 0, to: 0 } as any);\n+ migrator.off('migrated', migratedHandler);\n await migrator.up({ to: migration.fileName });\n await migrator.up({ from: migration.fileName } as any);\n await migrator.down();\n+ expect(migrated).toHaveLength(2);\n \n await remove(path + '/' + migration.fileName);\n const calls = mock.mock.calls.map(call => {\n", "Migrator.test.ts": "@@ -1,6 +1,6 @@\n (global as any).process.env.FORCE_COLOR = 0;\n import { Umzug } from 'umzug';\n-import type { MikroORM } from '@mikro-orm/core';\n+import type { MikroORM, UmzugMigration } from '@mikro-orm/core';\n import { MetadataStorage } from '@mikro-orm/core';\n import { Migration, MigrationStorage, Migrator } from '@mikro-orm/migrations';\n import type { DatabaseTable, MySqlDriver } from '@mikro-orm/mysql';\n@@ -379,13 +379,19 @@ describe('Migrator', () => {\n \n const mock = mockLogger(orm, ['query']);\n \n+ const migrated: unknown[] = [];\n+ const migratedHandler = (e: UmzugMigration) => { migrated.push(e); };\n+ migrator.on('migrated', migratedHandler);\n+\n await migrator.up(migration.fileName);\n await migrator.down(migration.fileName.replace('Migration', '').replace('.ts', ''));\n await migrator.up({ migrations: [migration.fileName] });\n await migrator.down({ from: 0, to: 0 } as any);\n+ migrator.off('migrated', migratedHandler);\n await migrator.up({ to: migration.fileName });\n await migrator.up({ from: migration.fileName } as any);\n await migrator.down();\n+ expect(migrated).toHaveLength(1);\n \n await remove(path + '/' + migration.fileName);\n const calls = mock.mock.calls.map(call => {\n"}
build: fix formatting
cdffd4a81d69bd0b88009292b1436190ba3190dd
build
https://github.com/tsparticles/tsparticles/commit/cdffd4a81d69bd0b88009292b1436190ba3190dd
fix formatting
{"Options.ts": "@@ -242,9 +242,7 @@ export class Options implements IOptions, IOptionLoader<IOptions> {\n this.load(defaultOptions);\n \n const responsiveOptions = this.responsive.find((t) =>\n- t.mode === ResponsiveMode.screen && screen\n- ? t.maxWidth > screen.availWidth\n- : t.maxWidth * pxRatio > width\n+ t.mode === ResponsiveMode.screen && screen ? t.maxWidth > screen.availWidth : t.maxWidth * pxRatio > width\n );\n \n this.load(responsiveOptions?.options);\n"}
build: updated deps, updated configs for polygon masks
d72d0cb87ef8b5cd2c67c75588af993fe9adb276
build
https://github.com/tsparticles/tsparticles/commit/d72d0cb87ef8b5cd2c67c75588af993fe9adb276
updated deps, updated configs for polygon masks
{"nx.json": "@@ -1,76 +1,75 @@\n {\n- \"targetDefaults\": {\n- \"build\": {\n- \"dependsOn\": [\n- \"^build\"\n- ]\n+ \"targetDefaults\": {\n+ \"build\": {\n+ \"dependsOn\": [\n+ \"^build\"\n+ ]\n+ },\n+ \"build:ci\": {\n+ \"dependsOn\": [\n+ \"^build:ci\"\n+ ]\n+ },\n+ \"prepare\": {\n+ \"dependsOn\": [\n+ \"^build\",\n+ \"^prepare\"\n+ ]\n+ },\n+ \"package\": {\n+ \"dependsOn\": [\n+ \"^build\",\n+ \"^prepare\",\n+ \"^package\"\n+ ]\n+ }\n },\n- \"build:ci\": {\n- \"dependsOn\": [\n- \"^build:ci\"\n- ]\n+ \"extends\": \"@nx/workspace/presets/npm.json\",\n+ \"tasksRunnerOptions\": {\n+ \"default\": {\n+ \"runner\": \"nx-cloud\",\n+ \"options\": {\n+ \"accessToken\": \"YmE3OGY1MTQtNTJjMC00MzdjLTgwYjEtZDk4YWE0OTQ4MTIzfHJlYWQ=\",\n+ \"cacheableOperations\": [\n+ \"build\",\n+ \"build:ci\",\n+ \"test\",\n+ \"lint\",\n+ \"package\",\n+ \"prepare\"\n+ ]\n+ }\n+ },\n+ \"local\": {\n+ \"runner\": \"nx/tasks-runners/default\",\n+ \"options\": {\n+ \"cacheableOperations\": [\n+ \"build\",\n+ \"build:ci\",\n+ \"lint\",\n+ \"test\",\n+ \"e2e\"\n+ ]\n+ }\n+ }\n },\n- \"prepare\": {\n- \"dependsOn\": [\n- \"^build\",\n- \"^prepare\"\n- ]\n+ \"affected\": {\n+ \"defaultBase\": \"main\"\n },\n- \"package\": {\n- \"dependsOn\": [\n- \"^build\",\n- \"^prepare\",\n- \"^package\"\n- ]\n- }\n- },\n- \"extends\": \"@nx/workspace/presets/npm.json\",\n- \"npmScope\": \"@tsparticles/workspace\",\n- \"tasksRunnerOptions\": {\n- \"default\": {\n- \"runner\": \"nx-cloud\",\n- \"options\": {\n- \"accessToken\": \"YmE3OGY1MTQtNTJjMC00MzdjLTgwYjEtZDk4YWE0OTQ4MTIzfHJlYWQ=\",\n- \"cacheableOperations\": [\n- \"build\",\n- \"build:ci\",\n- \"test\",\n- \"lint\",\n- \"package\",\n- \"prepare\"\n- ]\n- }\n+ \"pluginsConfig\": {\n+ \"@nrwl/js\": {\n+ \"analyzeSourceFiles\": false\n+ }\n },\n- \"local\": {\n- \"runner\": \"nx/tasks-runners/default\",\n- \"options\": {\n- \"cacheableOperations\": [\n- \"build\",\n- \"build:ci\",\n- \"lint\",\n- \"test\",\n- \"e2e\"\n+ \"$schema\": \"./node_modules/nx/schemas/nx-schema.json\",\n+ \"namedInputs\": {\n+ \"default\": [\n+ \"{projectRoot}/**/*\",\n+ \"sharedGlobals\"\n+ ],\n+ \"sharedGlobals\": [],\n+ \"production\": [\n+ \"default\"\n ]\n- }\n- }\n- },\n- \"affected\": {\n- \"defaultBase\": \"main\"\n- },\n- \"pluginsConfig\": {\n- \"@nrwl/js\": {\n- \"analyzeSourceFiles\": false\n }\n- },\n- \"$schema\": \"./node_modules/nx/schemas/nx-schema.json\",\n- \"namedInputs\": {\n- \"default\": [\n- \"{projectRoot}/**/*\",\n- \"sharedGlobals\"\n- ],\n- \"sharedGlobals\": [],\n- \"production\": [\n- \"default\"\n- ]\n- }\n }\n", "package.json": "@@ -29,7 +29,7 @@\n \"@commitlint/cli\": \"^18.4.3\",\n \"@commitlint/config-conventional\": \"^18.4.3\",\n \"@istanbuljs/nyc-config-typescript\": \"^1.0.2\",\n- \"@nx/workspace\": \"^16.10.0\",\n+ \"@nx/workspace\": \"^17.1.3\",\n \"@tsparticles/cli\": \"^2.0.0\",\n \"@tsparticles/eslint-config\": \"^2.0.0\",\n \"@tsparticles/prettier-config\": \"^2.0.0\",\n@@ -37,8 +37,8 @@\n \"@tsparticles/webpack-plugin\": \"^2.0.0\",\n \"@types/chai\": \"^4.3.11\",\n \"@types/jsdom\": \"^21.1.6\",\n- \"@types/mocha\": \"^10.0.5\",\n- \"@types/node\": \"^20.9.3\",\n+ \"@types/mocha\": \"^10.0.6\",\n+ \"@types/node\": \"^20.9.4\",\n \"@types/webpack-env\": \"^1.18.4\",\n \"@typescript-eslint/eslint-plugin\": \"^6.12.0\",\n \"@typescript-eslint/parser\": \"^6.12.0\",\n@@ -56,9 +56,9 @@\n \"husky\": \"^8.0.3\",\n \"jsdom\": \"^22.1.0\",\n \"jsdom-global\": \"^3.0.2\",\n- \"lerna\": \"^7.4.2\",\n+ \"lerna\": \"^8.0.0\",\n \"mocha\": \"^10.2.0\",\n- \"nx\": 
\"^16.10.0\",\n+ \"nx\": \"^17.1.3\",\n \"nx-cloud\": \"^16.5.2\",\n \"nyc\": \"^15.1.0\",\n \"prettier\": \"^3.1.0\",\n", "pnpm-lock.yaml": "@@ -21,8 +21,8 @@ importers:\n specifier: ^1.0.2\n version: 1.0.2([email protected])\n '@nx/workspace':\n- specifier: ^16.10.0\n- version: 16.10.0\n+ specifier: ^17.1.3\n+ version: 17.1.3\n '@tsparticles/cli':\n specifier: ^2.0.0\n version: 2.0.0([email protected])\n@@ -45,11 +45,11 @@ importers:\n specifier: ^21.1.6\n version: 21.1.6\n '@types/mocha':\n- specifier: ^10.0.5\n- version: 10.0.5\n+ specifier: ^10.0.6\n+ version: 10.0.6\n '@types/node':\n- specifier: ^20.9.3\n- version: 20.9.3\n+ specifier: ^20.9.4\n+ version: 20.9.4\n '@types/webpack-env':\n specifier: ^1.18.4\n version: 1.18.4\n@@ -102,14 +102,14 @@ importers:\n specifier: ^3.0.2\n version: 3.0.2([email protected])\n lerna:\n- specifier: ^7.4.2\n- version: 7.4.2\n+ specifier: ^8.0.0\n+ version: 8.0.0\n mocha:\n specifier: ^10.2.0\n version: 10.2.0\n nx:\n- specifier: ^16.10.0\n- version: 16.10.0\n+ specifier: ^17.1.3\n+ version: 17.1.3\n nx-cloud:\n specifier: ^16.5.2\n version: 16.5.2\n@@ -136,7 +136,7 @@ importers:\n version: 1.4.0\n ts-node:\n specifier: ^10.9.1\n- version: 10.9.1(@types/[email protected])([email protected])\n+ version: 10.9.1(@types/[email protected])([email protected])\n typedoc:\n specifier: ^0.25.3\n version: 0.25.3([email protected])\n@@ -3207,22 +3207,12 @@ packages:\n '@jridgewell/sourcemap-codec': 1.4.15\n dev: true\n \n- /@lerna/[email protected]:\n- resolution: {integrity: sha512-je+kkrfcvPcwL5Tg8JRENRqlbzjdlZXyaR88UcnCdNW0AJ1jX9IfHRys1X7AwSroU2ug8ESNC+suoBw1vX833Q==}\n- engines: {node: '>=16.0.0'}\n+ /@lerna/[email protected]([email protected]):\n+ resolution: {integrity: sha512-mCeEhjFDRwPY7J4uxCjqdzPwPFBUGlkdlQjBidaX5XaoQcxR2hAAvgHZKfVGkUUEZKfyPcWwKzen4KydNB2G7A==}\n+ engines: {node: '>=18.0.0'}\n dependencies:\n- chalk: 4.1.2\n- execa: 5.1.1\n- strong-log-transformer: 2.1.0\n- dev: true\n-\n- /@lerna/[email protected]:\n- resolution: {integrity: sha512-1wplFbQ52K8E/unnqB0Tq39Z4e+NEoNrpovEnl6GpsTUrC6WDp8+w0Le2uCBV0hXyemxChduCkLz4/y1H1wTeg==}\n- engines: {node: '>=16.0.0'}\n- dependencies:\n- '@lerna/child-process': 7.4.2\n '@npmcli/run-script': 6.0.2\n- '@nx/devkit': 16.10.0([email protected])\n+ '@nx/devkit': 17.1.3([email protected])\n '@octokit/plugin-enterprise-rest': 6.0.1\n '@octokit/rest': 19.0.11\n byte-size: 8.1.1\n@@ -3232,7 +3222,7 @@ packages:\n columnify: 1.6.0\n conventional-changelog-core: 5.0.1\n conventional-recommended-bump: 7.0.1\n- cosmiconfig: 8.2.0\n+ cosmiconfig: 8.3.6([email protected])\n dedent: 0.7.0\n execa: 5.0.0\n fs-extra: 11.1.1\n@@ -3259,7 +3249,7 @@ packages:\n npm-packlist: 5.1.1\n npm-registry-fetch: 14.0.5\n npmlog: 6.0.2\n- nx: 16.10.0\n+ nx: 17.1.3\n p-map: 4.0.0\n p-map-series: 2.1.0\n p-queue: 6.6.2\n@@ -3283,8 +3273,8 @@ packages:\n validate-npm-package-name: 5.0.0\n write-file-atomic: 5.0.1\n write-pkg: 4.0.0\n- yargs: 16.2.0\n- yargs-parser: 20.2.4\n+ yargs: 17.7.2\n+ yargs-parser: 21.1.1\n transitivePeerDependencies:\n - '@swc-node/register'\n - '@swc/core'\n@@ -3292,6 +3282,7 @@ packages:\n - debug\n - encoding\n - supports-color\n+ - typescript\n dev: true\n \n /@mapbox/[email protected]:\n@@ -3403,10 +3394,10 @@ packages:\n - supports-color\n dev: true\n \n- /@nrwl/[email protected]([email protected]):\n- resolution: {integrity: sha512-fRloARtsDQoQgQ7HKEy0RJiusg/HSygnmg4gX/0n/Z+SUS+4KoZzvHjXc6T5ZdEiSjvLypJ+HBM8dQzIcVACPQ==}\n+ /@nrwl/[email protected]([email protected]):\n+ resolution: {integrity: 
sha512-8HfIY7P3yIYfQ/XKuHoq0GGLA9GpwWtBlI9kPQ0ygjuJ9BkpiGMtQvO6003zs7c6vpc2vNeG+Jmi72+EKvoN5A==}\n dependencies:\n- '@nx/devkit': 16.10.0([email protected])\n+ '@nx/devkit': 17.1.3([email protected])\n transitivePeerDependencies:\n - nx\n dev: true\n@@ -3419,11 +3410,11 @@ packages:\n - debug\n dev: true\n \n- /@nrwl/[email protected]:\n- resolution: {integrity: sha512-QNAanpINbr+Pod6e1xNgFbzK1x5wmZl+jMocgiEFXZ67KHvmbD6MAQQr0MMz+GPhIu7EE4QCTLTyCEMlAG+K5Q==}\n+ /@nrwl/[email protected]:\n+ resolution: {integrity: sha512-9YpfEkUpVqOweqgQvMDcWApNx4jhCqBNH5IByZj302Enp3TLnQSvhuX5Dfr8hNQRQokIpEn6tW8SGTctTM5LXw==}\n hasBin: true\n dependencies:\n- nx: 16.10.0\n+ nx: 17.1.3\n tslib: 2.6.0\n transitivePeerDependencies:\n - '@swc-node/register'\n@@ -3431,33 +3422,33 @@ packages:\n - debug\n dev: true\n \n- /@nrwl/[email protected]:\n- resolution: {integrity: sha512-fZeNxhFs/2cm326NebfJIgSI3W4KZN94WGS46wlIBrUUGP5/vwHYsi09Kx6sG1kRkAuZVtgJ33uU2F6xcAWzUA==}\n+ /@nrwl/[email protected]:\n+ resolution: {integrity: sha512-V5nLZ58DIZLlJQASYHKo9mUcdm2FbzjJeoKwi0X3VXUvU1ftforFxNIQ7BqS0qjZJKKHjpgZ+cAH0xeVysS5kA==}\n dependencies:\n- '@nx/workspace': 16.10.0\n+ '@nx/workspace': 17.1.3\n transitivePeerDependencies:\n - '@swc-node/register'\n - '@swc/core'\n - debug\n dev: true\n \n- /@nx/[email protected]([email protected]):\n- resolution: {integrity: sha512-IvKQqRJFDDiaj33SPfGd3ckNHhHi6ceEoqCbAP4UuMXOPPVOX6H0KVk+9tknkPb48B7jWIw6/AgOeWkBxPRO5w==}\n+ /@nx/[email protected]([email protected]):\n+ resolution: {integrity: sha512-1Is7ooovg3kdGJ5VdkePulRUDaMYLLULr+LwXgx7oHSW7AY2iCmhkoOE/vSR7DJ6rkey2gYx7eT1IoRoORiIaQ==}\n peerDependencies:\n- nx: '>= 15 <= 17'\n+ nx: '>= 16 <= 18'\n dependencies:\n- '@nrwl/devkit': 16.10.0([email protected])\n+ '@nrwl/devkit': 17.1.3([email protected])\n ejs: 3.1.9\n enquirer: 2.3.6\n ignore: 5.2.4\n- nx: 16.10.0\n+ nx: 17.1.3\n semver: 7.5.3\n tmp: 0.2.1\n tslib: 2.6.0\n dev: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-YF+MIpeuwFkyvM5OwgY/rTNRpgVAI/YiR0yTYCZR+X3AAvP775IVlusNgQ3oedTBRUzyRnI4Tknj1WniENFsvQ==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-f4qLa0y3C4uuhYKgq+MU892WaQvtvmHqrEhHINUOxYXNiLy2sgyJPW0mOZvzXtC4dPaUmiVaFP5RMVzc8Lxhtg==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [darwin]\n@@ -3465,8 +3456,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-ypi6YxwXgb0kg2ixKXE3pwf5myVNUgWf1CsV5OzVccCM8NzheMO51KDXTDmEpXdzUsfT0AkO1sk5GZeCjhVONg==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-kh76ZjqkLeQUIAfTa9G/DFFf+e1sZ5ipDzk7zFGhZ2k68PoQoFdsFOO3C513JmuEdavspts6Hkifsqh61TaE+A==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [darwin]\n@@ -3474,8 +3465,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-UeEYFDmdbbDkTQamqvtU8ibgu5jQLgFF1ruNb/U4Ywvwutw2d4ruOMl2e0u9hiNja9NFFAnDbvzrDcMo7jYqYw==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-CRuVL5ZSLb+Gc8vwMUUe9Pl/1Z26YtXMKTahBMQh2dac63vzLgzqIV4c66aduUl1x2M0kGYBSIIRG9z0/BgWeg==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [freebsd]\n@@ -3483,8 +3474,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-WV3XUC2DB6/+bz1sx+d1Ai9q2Cdr+kTZRN50SOkfmZUQyEBaF6DRYpx/a4ahhxH3ktpNfyY8Maa9OEYxGCBkQA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-KDBmd5tSrg93g/oij/eGW4yeVNVK3DBIM4VYAS2vtkIgVOGoqcQ+SEIeMK3nMUJP9jGyblt3QNj5ZsJBtScwQw==}\n engines: {node: '>= 10'}\n cpu: [arm]\n os: [linux]\n@@ 
-3492,8 +3483,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-aWIkOUw995V3ItfpAi5FuxQ+1e9EWLS1cjWM1jmeuo+5WtaKToJn5itgQOkvSlPz+HSLgM3VfXMvOFALNk125g==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-W2tNL/7sIwoQKLmuy68Usd6TZzIZvxZt4UE30kDwGc2RSap6RCHAvDbzSxtW+L4+deC9UxX0Tty0VuW+J8FjSg==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [linux]\n@@ -3501,8 +3492,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-uO6Gg+irqpVcCKMcEPIQcTFZ+tDI02AZkqkP7koQAjniLEappd8DnUBSQdcn53T086pHpdc264X/ZEpXFfrKWQ==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-Oto3gkLd7yweuVUCsSHwm4JkAIbcxpPJP0ycRHI/PRHPMIOPiMX8r651QM1amMyKAbJtAe047nyb9Sh1X0FA4A==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [linux]\n@@ -3510,8 +3501,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-134PW/u/arNFAQKpqMJniC7irbChMPz+W+qtyKPAUXE0XFKPa7c1GtlI/wK2dvP9qJDZ6bKf0KtA0U/m2HMUOA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-pJS994sa5PBPFak93RydTB9KdEmiVb3rgiSB7PDBegphERbzHEB77B7G8M5TZ62dGlMdplIEKmdhY5XNqeAf9A==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [linux]\n@@ -3519,8 +3510,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-q8sINYLdIJxK/iUx9vRk5jWAWb/2O0PAbOJFwv4qkxBv4rLoN7y+otgCZ5v0xfx/zztFgk/oNY4lg5xYjIso2Q==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-4Hcx5Fg/88jV+bcTr6P0dM4unXNvKgrGJe3oK9/sgEhiW6pD2UAFjv16CCSRcWhDUAzUDqcwnD2fgg+vnAJG6g==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [linux]\n@@ -3528,8 +3519,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-moJkL9kcqxUdJSRpG7dET3UeLIciwrfP08mzBQ12ewo8K8FzxU8ZUsTIVVdNrwt01CXOdXoweGfdQLjJ4qTURA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-dUasEuskmDxUL36XA0GZqSb9233suE4wKhxrMobyFBzHUZ2tq/unzOpPjYfqDBie4QIvF8tEpAjQsLds8LWgbw==}\n engines: {node: '>= 10'}\n cpu: [arm64]\n os: [win32]\n@@ -3537,8 +3528,8 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-5iV2NKZnzxJwZZ4DM5JVbRG/nkhAbzEskKaLBB82PmYGKzaDHuMHP1lcPoD/rtYMlowZgNA/RQndfKvPBPwmXA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-eTuTpBHFvA5NFJh/iosmqCL4JOAjDrwXLSMgfKrZKjiApHMG1T/5Hb+PrsNpt+WnGp94ur7c4Dtx4xD5vlpAEw==}\n engines: {node: '>= 10'}\n cpu: [x64]\n os: [win32]\n@@ -3546,16 +3537,14 @@ packages:\n dev: true\n optional: true\n \n- /@nx/[email protected]:\n- resolution: {integrity: sha512-95Eq36bzq2hb095Zvg+Ru8o9oIeOE62tNGGpohBkZPKoK2CUTYEq0AZtdj1suXS82ukCFCyyZ/c/fwxL62HRZA==}\n+ /@nx/[email protected]:\n+ resolution: {integrity: sha512-Je9nml9NJZJ0Ga70njK4N8KNSP7MnlxiVlosMzBAWDGrgnW+A403nae9pstEC2uGKpce2T7jBqFewAy+3U6JbA==}\n dependencies:\n- '@nrwl/workspace': 16.10.0\n- '@nx/devkit': 16.10.0([email protected])\n+ '@nrwl/workspace': 17.1.3\n+ '@nx/devkit': 17.1.3([email protected])\n chalk: 4.1.2\n enquirer: 2.3.6\n- ignore: 5.2.4\n- nx: 16.10.0\n- rxjs: 7.8.1\n+ nx: 17.1.3\n tslib: 2.6.0\n yargs-parser: 21.1.1\n transitivePeerDependencies:\n@@ -3692,15 +3681,6 @@ packages:\n '@octokit/openapi-types': 18.0.0\n dev: true\n \n- /@parcel/[email protected]:\n- resolution: {integrity: sha512-cTDi+FUDBIUOBKEtj+nhiJ71AZVlkAsQFuGQTun5tV9mwQBQgZvhCzG+URPQc8myeN32yRVZEfVAPCs1RW+Jvg==}\n- engines: {node: '>= 10.0.0'}\n- requiresBuild: true\n- dependencies:\n- node-addon-api: 
3.2.1\n- node-gyp-build: 4.6.0\n- dev: true\n-\n /@pkgjs/[email protected]:\n resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==}\n engines: {node: '>=14'}\n@@ -3855,7 +3835,7 @@ packages:\n '@babel/preset-env': 7.23.3(@babel/[email protected])\n '@tsparticles/eslint-config': 2.0.0\n '@tsparticles/prettier-config': 2.0.0\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n '@types/webpack-bundle-analyzer': 4.6.3([email protected])\n '@types/webpack-env': 1.18.4\n '@typescript-eslint/eslint-plugin': 6.12.0(@typescript-eslint/[email protected])([email protected])([email protected])\n@@ -3906,7 +3886,7 @@ packages:\n resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==}\n dependencies:\n '@types/connect': 3.4.35\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n dev: true\n \n /@types/[email protected]:\n@@ -3914,7 +3894,7 @@ packages:\n dependencies:\n '@types/http-cache-semantics': 4.0.1\n '@types/keyv': 3.1.4\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n '@types/responselike': 1.0.0\n dev: true\n \n@@ -3931,7 +3911,7 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==}\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n dev: true\n \n /@types/[email protected]:\n@@ -3955,7 +3935,7 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-wALWQwrgiB2AWTT91CB62b6Yt0sNHpznUXeZEcnPU3DRdlDIz74x8Qg1UUYKSVFi+va5vKOLYRBI1bRKiLLKIg==}\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n '@types/qs': 6.9.7\n '@types/range-parser': 1.2.4\n '@types/send': 0.17.1\n@@ -3977,7 +3957,7 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-/7kkMsC+/kMs7gAYmmBR9P0vGTnOoLhQhyhQJSlXGI5bzTHp6xdo0TtKWQAsz6pmSAeVqKSbqeyP6hytqr9FDw==}\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n '@types/tough-cookie': 4.0.2\n parse5: 7.1.2\n dev: true\n@@ -3993,7 +3973,7 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==}\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n dev: true\n \n /@types/[email protected]:\n@@ -4018,8 +3998,8 @@ packages:\n resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==}\n dev: true\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-JUI82qwkRhYJYesuKSeFy46fKbhLaV9RU1gAh2PHmyoEECvlTf5UYeIivYlMszp1WT2CwJ4ziC3zoxsodhsGwg==}\n+ /@types/[email protected]:\n+ resolution: {integrity: sha512-dJvrYWxP/UcXm36Qn36fxhUKu8A/xMRXVT2cliFF1Z7UA9liG5Psj3ezNSZw+5puH2czDXRLcXQxf8JbJt0ejg==}\n dev: true\n \n /@types/[email protected]:\n@@ -4036,8 +4016,8 @@ packages:\n undici-types: 5.26.5\n dev: true\n \n- /@types/[email protected]:\n- resolution: {integrity: sha512-nk5wXLAXGBKfrhLB0cyHGbSqopS+nz0BUgZkUQqSHSSgdee0kssp1IAqlQOu333bW+gMNs2QREx7iynm19Abxw==}\n+ /@types/[email protected]:\n+ resolution: {integrity: sha512-wmyg8HUhcn6ACjsn8oKYjkN/zUzQeNtMy44weTJSM6p4MMzEOuKbA3OjJ267uPCOW7Xex9dyrNTful8XTQYoDA==}\n dependencies:\n undici-types: 5.26.5\n dev: true\n@@ -4057,7 +4037,7 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==}\n dependencies:\n- '@types/node': 
20.9.3\n+ '@types/node': 20.9.4\n dev: true\n \n /@types/[email protected]:\n@@ -4068,14 +4048,14 @@ packages:\n resolution: {integrity: sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q==}\n dependencies:\n '@types/mime': 1.3.2\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n dev: true\n \n /@types/[email protected]:\n resolution: {integrity: sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==}\n dependencies:\n '@types/mime': 3.0.1\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n dev: true\n \n /@types/[email protected]:\n@@ -4095,7 +4075,7 @@ packages:\n /@types/[email protected]([email protected]):\n resolution: {integrity: sha512-XYU3m7oRb1tlE8YhwkKLi1xba2buNB9V4VkQtOVTfJuUm/413pE/UCMVcPDFFBwpzGkr9y1WbSEvdPjKVPt0gw==}\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n tapable: 2.2.1\n webpack: 5.89.0([email protected])\n transitivePeerDependencies:\n@@ -4112,14 +4092,14 @@ packages:\n /@types/[email protected]:\n resolution: {integrity: sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==}\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n dev: true\n \n /@types/[email protected]:\n resolution: {integrity: sha512-Cn6WYCm0tXv8p6k+A8PvbDG763EDpBoTzHdA+Q/MF6H3sapGjCm9NzoaJncJS9tUKSuCoDs9XHxYYsQDgxR6kw==}\n requiresBuild: true\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n dev: true\n optional: true\n \n@@ -5620,16 +5600,6 @@ packages:\n typescript: 5.3.2\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ==}\n- engines: {node: '>=14'}\n- dependencies:\n- import-fresh: 3.3.0\n- js-yaml: 4.1.0\n- parse-json: 5.2.0\n- path-type: 4.0.0\n- dev: true\n-\n /[email protected]([email protected]):\n resolution: {integrity: sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==}\n engines: {node: '>=14'}\n@@ -7910,7 +7880,7 @@ packages:\n resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}\n engines: {node: '>= 10.13.0'}\n dependencies:\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n merge-stream: 2.0.0\n supports-color: 8.1.1\n dev: true\n@@ -8143,15 +8113,14 @@ packages:\n resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==}\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-gxavfzHfJ4JL30OvMunmlm4Anw7d7Tq6tdVHzUukLdS9nWnxCN/QB21qR+VJYp5tcyXogHKbdUEGh6qmeyzxSA==}\n- engines: {node: '>=16.0.0'}\n+ /[email protected]:\n+ resolution: {integrity: sha512-Ddshct9hJrujtR7t2cAIiiiKnQCKiTvR/Ki3KhzpBNVepYtWq+dg+HxArZrezF+sYxI+OCxL00BxDHY4/H4uGg==}\n+ engines: {node: '>=18.0.0'}\n hasBin: true\n dependencies:\n- '@lerna/child-process': 7.4.2\n- '@lerna/create': 7.4.2\n+ '@lerna/create': 8.0.0([email protected])\n '@npmcli/run-script': 6.0.2\n- '@nx/devkit': 16.10.0([email protected])\n+ '@nx/devkit': 17.1.3([email protected])\n '@octokit/plugin-enterprise-rest': 6.0.1\n '@octokit/rest': 19.0.11\n byte-size: 8.1.1\n@@ -8162,7 +8131,7 @@ packages:\n conventional-changelog-angular: 7.0.0\n conventional-changelog-core: 5.0.1\n conventional-recommended-bump: 7.0.1\n- cosmiconfig: 8.2.0\n+ cosmiconfig: 8.3.6([email protected])\n dedent: 0.7.0\n envinfo: 7.8.1\n execa: 5.0.0\n@@ -8194,7 +8163,7 @@ packages:\n 
npm-packlist: 5.1.1\n npm-registry-fetch: 14.0.5\n npmlog: 6.0.2\n- nx: 16.10.0\n+ nx: 17.1.3\n p-map: 4.0.0\n p-map-series: 2.1.0\n p-pipe: 3.1.0\n@@ -8221,8 +8190,8 @@ packages:\n validate-npm-package-name: 5.0.0\n write-file-atomic: 5.0.1\n write-pkg: 4.0.0\n- yargs: 16.2.0\n- yargs-parser: 20.2.4\n+ yargs: 17.7.2\n+ yargs-parser: 21.1.1\n transitivePeerDependencies:\n - '@swc-node/register'\n - '@swc/core'\n@@ -8900,10 +8869,6 @@ packages:\n tslib: 2.6.0\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==}\n- dev: true\n-\n /[email protected]:\n resolution: {integrity: sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==}\n engines: {node: 4.x || >=6.0.0}\n@@ -8928,11 +8893,6 @@ packages:\n whatwg-url: 5.0.0\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-NTZVKn9IylLwUzaKjkas1e4u2DLNcV4rdYagA4PWdPwW87Bi7z+BznyKSRwS/761tV/lzCGXplWsiaMjLqP2zQ==}\n- hasBin: true\n- dev: true\n-\n /[email protected]:\n resolution: {integrity: sha512-dMXsYP6gc9rRbejLXmTbVRYjAHw7ppswsKyMxuxJxxOHzluIO1rGp9TOQgjFJ+2MCqcOcQTOPB/8Xwhr+7s4Eg==}\n engines: {node: ^12.13 || ^14.13 || >=16}\n@@ -9019,7 +8979,7 @@ packages:\n resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==}\n dependencies:\n hosted-git-info: 2.8.9\n- resolve: 1.22.3\n+ resolve: 1.22.8\n semver: 5.7.2\n validate-npm-package-license: 3.0.4\n dev: true\n@@ -9029,7 +8989,7 @@ packages:\n engines: {node: '>=10'}\n dependencies:\n hosted-git-info: 4.1.0\n- is-core-module: 2.13.0\n+ is-core-module: 2.13.1\n semver: 7.5.4\n validate-npm-package-license: 3.0.4\n dev: true\n@@ -9193,8 +9153,8 @@ packages:\n - debug\n dev: true\n \n- /[email protected]:\n- resolution: {integrity: sha512-gZl4iCC0Hx0Qe1VWmO4Bkeul2nttuXdPpfnlcDKSACGu3ZIo+uySqwOF8yBAxSTIf8xe2JRhgzJN1aFkuezEBg==}\n+ /[email protected]:\n+ resolution: {integrity: sha512-6LYoTt01nS1d/dvvYtRs+pEAMQmUVsd2fr/a8+X1cDjWrb8wsf1O3DwlBTqKOXOazpS3eOr0Ukc9N1svbu7uXA==}\n hasBin: true\n requiresBuild: true\n peerDependencies:\n@@ -9206,8 +9166,7 @@ packages:\n '@swc/core':\n optional: true\n dependencies:\n- '@nrwl/tao': 16.10.0\n- '@parcel/watcher': 2.0.4\n+ '@nrwl/tao': 17.1.3\n '@yarnpkg/lockfile': 1.1.0\n '@yarnpkg/parsers': 3.0.0-rc.46\n '@zkochan/js-yaml': 0.0.6\n@@ -9243,16 +9202,16 @@ packages:\n yargs: 17.7.2\n yargs-parser: 21.1.1\n optionalDependencies:\n- '@nx/nx-darwin-arm64': 16.10.0\n- '@nx/nx-darwin-x64': 16.10.0\n- '@nx/nx-freebsd-x64': 16.10.0\n- '@nx/nx-linux-arm-gnueabihf': 16.10.0\n- '@nx/nx-linux-arm64-gnu': 16.10.0\n- '@nx/nx-linux-arm64-musl': 16.10.0\n- '@nx/nx-linux-x64-gnu': 16.10.0\n- '@nx/nx-linux-x64-musl': 16.10.0\n- '@nx/nx-win32-arm64-msvc': 16.10.0\n- '@nx/nx-win32-x64-msvc': 16.10.0\n+ '@nx/nx-darwin-arm64': 17.1.3\n+ '@nx/nx-darwin-x64': 17.1.3\n+ '@nx/nx-freebsd-x64': 17.1.3\n+ '@nx/nx-linux-arm-gnueabihf': 17.1.3\n+ '@nx/nx-linux-arm64-gnu': 17.1.3\n+ '@nx/nx-linux-arm64-musl': 17.1.3\n+ '@nx/nx-linux-x64-gnu': 17.1.3\n+ '@nx/nx-linux-x64-musl': 17.1.3\n+ '@nx/nx-win32-arm64-msvc': 17.1.3\n+ '@nx/nx-win32-x64-msvc': 17.1.3\n transitivePeerDependencies:\n - debug\n dev: true\n@@ -11297,7 +11256,7 @@ packages:\n yn: 3.1.1\n dev: true\n \n- /[email protected](@types/[email protected])([email protected]):\n+ /[email protected](@types/[email protected])([email protected]):\n resolution: {integrity: 
sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}\n hasBin: true\n peerDependencies:\n@@ -11316,7 +11275,7 @@ packages:\n '@tsconfig/node12': 1.0.11\n '@tsconfig/node14': 1.0.3\n '@tsconfig/node16': 1.0.4\n- '@types/node': 20.9.3\n+ '@types/node': 20.9.4\n acorn: 8.10.0\n acorn-walk: 8.2.0\n arg: 4.1.3\n", "hollowknight.ts": "@@ -91,8 +91,11 @@ const options: ISourceOptions = {\n polygon: {\n draw: {\n enable: true,\n- lineColor: \"rgba(255,255,255,0.2)\",\n- lineWidth: 0.5,\n+ stroke: {\n+ color: \"#fff\",\n+ width: 0.5,\n+ opacity: 0.2,\n+ },\n },\n enable: true,\n move: {\n", "imagesDirections.ts": "@@ -86,8 +86,11 @@ const options: ISourceOptions = {\n polygon: {\n draw: {\n enable: false,\n- lineColor: \"#ffffff\",\n- lineWidth: 0.5,\n+ stroke: {\n+ color: \"#fff\",\n+ width: 0.5,\n+ opacity: 0.2,\n+ },\n },\n move: {\n radius: 10,\n", "localPolygonMask.ts": "@@ -55,8 +55,11 @@ const options: ISourceOptions = {\n polygon: {\n draw: {\n enable: true,\n- lineColor: \"rgba(255,255,255,1)\",\n- lineWidth: 1,\n+ stroke: {\n+ color: \"#fff\",\n+ width: 0.5,\n+ opacity: 0.2,\n+ },\n },\n enable: true,\n move: {\n", "multiplePolygonMasks.ts": "@@ -97,8 +97,11 @@ const options: ISourceOptions = {\n polygon: {\n draw: {\n enable: true,\n- lineColor: \"rgba(255,255,255,0.2)\",\n- lineWidth: 0.5,\n+ stroke: {\n+ color: \"#fff\",\n+ width: 0.5,\n+ opacity: 0.2,\n+ },\n },\n enable: true,\n move: {\n", "pathPolygonMask.ts": "@@ -58,8 +58,11 @@ const options: ISourceOptions = {\n polygon: {\n draw: {\n enable: true,\n- lineColor: \"rgba(255,255,255,0.2)\",\n- lineWidth: 0.5,\n+ stroke: {\n+ color: \"#fff\",\n+ width: 0.5,\n+ opacity: 0.2,\n+ },\n },\n enable: true,\n move: {\n", "polygonMask.ts": "@@ -97,8 +97,11 @@ const options: ISourceOptions = {\n polygon: {\n draw: {\n enable: true,\n- lineColor: \"rgba(255,255,255,0.2)\",\n- lineWidth: 1,\n+ stroke: {\n+ color: \"#fff\",\n+ width: 1,\n+ opacity: 0.2,\n+ },\n },\n enable: true,\n move: {\n"}
chore: release new beta
9f3accf405f649438624c80096b364e41c3cf80d
chore
https://github.com/pmndrs/react-spring/commit/9f3accf405f649438624c80096b364e41c3cf80d
release new beta
{"package.json": "@@ -1,6 +1,6 @@\n {\n \"name\": \"react-spring\",\n- \"version\": \"9.0.0-beta.15\",\n+ \"version\": \"9.0.0-beta.16\",\n \"private\": true,\n \"main\": \"dist/src/web.cjs.js\",\n \"types\": \"dist/src/web.d.ts\",\n"}
fix(pyspark): specialize implementation of `has_operation`
508234663cc5f633bf7ee9de4c523f06c23d0b2c
fix
https://github.com/rohankumardubey/ibis/commit/508234663cc5f633bf7ee9de4c523f06c23d0b2c
specialize implementation of `has_operation`
{"__init__.py": "@@ -10,6 +10,7 @@ from pyspark.sql.column import Column\n \n if TYPE_CHECKING:\n import ibis.expr.types as ir\n+ import ibis.expr.operations as ops\n \n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n@@ -582,3 +583,6 @@ class Backend(BaseSQLBackend):\n self._fully_qualified_name(name, database), maybe_noscan\n )\n return self.raw_sql(stmt)\n+\n+ def has_operation(cls, operation: type[ops.Value]) -> bool:\n+ return operation in PySparkExprTranslator._registry.keys()\n"}
docs(cloud): move Cube IDE to Developer Tools to match OSS docs
7c8cabe84e04587a260054e801c1b2a3ae5be2e2
docs
https://github.com/wzhiqing/cube/commit/7c8cabe84e04587a260054e801c1b2a3ae5be2e2
move Cube IDE to Developer Tools to match OSS docs
{"Cube-IDE.md": "@@ -1,7 +1,7 @@\n ---\n title: Cube IDE\n permalink: /cloud/cube-ide\n-category: Cube IDE\n+category: Developer Tools\n menuOrder: 1\n ---\n \n", "MainMenu.tsx": "@@ -19,7 +19,7 @@ import { layout } from '../../theme';\n const menuOrderCloud = [\n 'Cube Cloud Getting Started',\n 'Configuration',\n- 'Cube IDE',\n+ 'Developer Tools',\n 'Deploys',\n 'Inspecting Queries'\n ];\n"}
docs: remove duplicate "or"
b6ef3ccdc1c4d27e9c7ea010e8e545090b89e5b6
docs
https://github.com/rohankumardubey/ibis/commit/b6ef3ccdc1c4d27e9c7ea010e8e545090b89e5b6
remove duplicate "or"
{"sessionize.md": "@@ -4,7 +4,7 @@ Suppose you have entities (users, objects, actions, etc) that have event logs th\n \n You might be interested in partitioning these logs by something called **sessions**, which can be defined as groups of consecutive event records without long interruptions for a given entity.\n \n-In the case of a user portal, it might be grouping the navigation events that result in completing a task or or buying a product.\n+In the case of a user portal, it might be grouping the navigation events that result in completing a task or buying a product.\n For online games, it might be a the grouping of activity events of a given user playing the game while remaining logged in.\n \n Sessionization can also be useful on longer time scales, for instance to reconstruct active subscription data from a raw payment or activity log, so as to model customer churn.\n"}
feat(snowflake): implement `TimestampFromYMDHMS`
1eba8bef192e93cc97ed8b49b8c60f2bc8e118f4
feat
https://github.com/ibis-project/ibis/commit/1eba8bef192e93cc97ed8b49b8c60f2bc8e118f4
implement `TimestampFromYMDHMS`
{"registry.py": "@@ -233,6 +233,7 @@ operation_registry.update(\n ),\n 1,\n ),\n+ ops.TimestampFromYMDHMS: fixed_arity(sa.func.timestamp_from_parts, 6),\n }\n )\n \n@@ -256,7 +257,6 @@ _invalid_operations = {\n ops.IntervalFromInteger,\n ops.TimestampDiff,\n ops.TimestampFromUNIX,\n- ops.TimestampFromYMDHMS,\n }\n \n operation_registry = {\n", "test_temporal.py": "@@ -781,7 +781,7 @@ def test_date_literal(con):\n assert result.strftime('%Y-%m-%d') == '2022-02-04'\n \n \[email protected]([\"pandas\", \"datafusion\", \"mysql\", \"dask\", \"pyspark\", \"snowflake\"])\[email protected]([\"pandas\", \"datafusion\", \"mysql\", \"dask\", \"pyspark\"])\n @pytest.mark.notyet([\"clickhouse\", \"impala\"])\n def test_timestamp_literal(con):\n expr = ibis.timestamp(2022, 2, 4, 16, 20, 0)\n@@ -824,6 +824,20 @@ def test_date_column_from_ymd(con, alltypes, df):\n tm.assert_series_equal(golden, result.timestamp_col)\n \n \[email protected]([\"pandas\", \"datafusion\", \"mysql\", \"dask\", \"pyspark\"])\[email protected]([\"clickhouse\", \"impala\"])\n+def test_timestamp_column_from_ymdhms(con, alltypes, df):\n+ c = alltypes.timestamp_col\n+ expr = ibis.timestamp(\n+ c.year(), c.month(), c.day(), c.hour(), c.minute(), c.second()\n+ )\n+ tbl = alltypes[expr.name('timestamp_col')]\n+ result = con.execute(tbl)\n+\n+ golden = df.timestamp_col.dt.floor(\"s\").astype('datetime64[ns]')\n+ tm.assert_series_equal(golden, result.timestamp_col)\n+\n+\n @pytest.mark.notimpl([\"datafusion\", \"impala\"])\n def test_date_scalar_from_iso(con):\n expr = ibis.literal('2022-02-24')\n", "api.py": "@@ -585,6 +585,7 @@ def timestamp(\n @timestamp.register(np.floating)\n @timestamp.register(int)\n @timestamp.register(float)\[email protected](ir.IntegerValue)\n def _timestamp_from_ymdhms(\n value, *args, timezone: str | None = None\n ) -> ir.TimestampScalar:\n"}
chore(release): update internal dependencies to use tilde [skip ci]
c562f1317707a6a200fcf9c689a76c8af32370b7
chore
https://github.com/mikro-orm/mikro-orm/commit/c562f1317707a6a200fcf9c689a76c8af32370b7
update internal dependencies to use tilde [skip ci]
{"package.json": "@@ -58,7 +58,7 @@\n \"access\": \"public\"\n },\n \"dependencies\": {\n- \"@mikro-orm/knex\": \"^5.7.6\",\n+ \"@mikro-orm/knex\": \"~5.7.6\",\n \"fs-extra\": \"11.1.1\",\n \"sqlite3\": \"5.1.6\",\n \"sqlstring-sqlite\": \"0.1.1\"\n", "yarn.lock": "Binary files a/yarn.lock and b/yarn.lock differ\n"}
feat(core): support indexes in entity generator (#437) Closes #421
90c0162af0e49574e1fdfa87504634716d1dba46
feat
https://github.com/mikro-orm/mikro-orm/commit/90c0162af0e49574e1fdfa87504634716d1dba46
support indexes in entity generator (#437) Closes #421
{"MongoDriver.ts": "@@ -1,7 +1,7 @@\n import { ClientSession, ObjectId } from 'mongodb';\n import { DatabaseDriver } from './DatabaseDriver';\n import { MongoConnection } from '../connections/MongoConnection';\n-import { EntityData, AnyEntity, FilterQuery, EntityMetadata } from '../typings';\n+import { EntityData, AnyEntity, FilterQuery, EntityMetadata, EntityProperty } from '../typings';\n import { Configuration, Utils } from '../utils';\n import { MongoPlatform } from '../platforms/MongoPlatform';\n import { FindOneOptions, FindOptions } from './IDatabaseDriver';\n@@ -91,25 +91,45 @@ export class MongoDriver extends DatabaseDriver<MongoConnection> {\n await this.createCollections();\n const promises: Promise<string>[] = [];\n \n- const createIndexes = (meta: EntityMetadata, type: 'indexes' | 'uniques') => {\n- meta[type].forEach(index => {\n- const properties = Utils.flatten(Utils.asArray(index.properties).map(prop => meta.properties[prop].fieldNames));\n- promises.push(this.getConnection('write').getCollection(meta.name).createIndex(properties, {\n- name: index.name,\n- unique: type === 'uniques',\n- ...(index.options || {}),\n- }));\n- });\n- };\n-\n for (const meta of Object.values(this.metadata.getAll())) {\n- createIndexes(meta, 'indexes');\n- createIndexes(meta, 'uniques');\n+ promises.push(...this.createIndexes(meta, 'indexes'));\n+ promises.push(...this.createIndexes(meta, 'uniques'));\n+\n+ for (const prop of Object.values(meta.properties)) {\n+ promises.push(...this.createPropertyIndexes(meta, prop, 'index'));\n+ promises.push(...this.createPropertyIndexes(meta, prop, 'unique'));\n+ }\n }\n \n await Promise.all(promises);\n }\n \n+ private createIndexes(meta: EntityMetadata, type: 'indexes' | 'uniques') {\n+ const promises: Promise<string>[] = [];\n+\n+ meta[type].forEach(index => {\n+ const properties = Utils.flatten(Utils.asArray(index.properties).map(prop => meta.properties[prop].fieldNames));\n+ promises.push(this.getConnection('write').getCollection(meta.name).createIndex(properties, {\n+ name: index.name,\n+ unique: type === 'uniques',\n+ ...(index.options || {}),\n+ }));\n+ });\n+\n+ return promises;\n+ }\n+\n+ private createPropertyIndexes(meta: EntityMetadata, prop: EntityProperty, type: 'index' | 'unique') {\n+ if (!prop[type]) {\n+ return [];\n+ }\n+\n+ return [this.getConnection('write').getCollection(meta.name).createIndex(prop.fieldNames, {\n+ name: (Utils.isString(prop[type]) ? 
prop[type] : undefined) as string,\n+ unique: type === 'unique',\n+ })];\n+ }\n+\n private renameFields<T>(entityName: string, data: T): T {\n data = Object.assign({}, data); // copy first\n Utils.renameKey(data, 'id', '_id');\n", "MetadataDiscovery.ts": "@@ -43,6 +43,7 @@ export class MetadataDiscovery {\n filtered.forEach(meta => Object.values(meta.properties).forEach(prop => this.initUnsigned(prop)));\n filtered.forEach(meta => this.autoWireBidirectionalProperties(meta));\n filtered.forEach(meta => this.discovered.push(...this.processEntity(meta)));\n+ this.discovered.forEach(meta => Object.values(meta.properties).forEach(prop => this.initIndexes(meta, prop)));\n \n const diff = Date.now() - startTime;\n this.logger.log('discovery', `- entity discovery finished after ${chalk.green(`${diff} ms`)}`);\n@@ -550,6 +551,29 @@ export class MetadataDiscovery {\n prop.unsigned = (prop.primary || prop.unsigned) && (prop.type === 'number' || this.platform.isBigIntProperty(prop));\n }\n \n+ private initIndexes<T>(meta: EntityMetadata<T>, prop: EntityProperty<T>): void {\n+ const simpleIndex = meta.indexes.find(index => index.properties === prop.name && !index.options && !index.type);\n+ const simpleUnique = meta.uniques.find(index => index.properties === prop.name && !index.options);\n+ const owner = prop.reference === ReferenceType.MANY_TO_ONE || (prop.reference === ReferenceType.ONE_TO_ONE && prop.owner);\n+\n+ if (!prop.index && simpleIndex) {\n+ Utils.defaultValue(simpleIndex, 'name', true);\n+ prop.index = simpleIndex.name;\n+ meta.indexes.splice(meta.indexes.indexOf(simpleIndex), 1);\n+ }\n+\n+ if (!prop.unique && simpleUnique) {\n+ Utils.defaultValue(simpleUnique, 'name', true);\n+ prop.unique = simpleUnique.name;\n+ meta.uniques.splice(meta.uniques.indexOf(simpleUnique), 1);\n+ }\n+\n+ if (owner && this.metadata.get(prop.type).compositePK) {\n+ meta.indexes.push({ properties: prop.name });\n+ prop.index = false;\n+ }\n+ }\n+\n private getEntityClassOrSchema(path: string, name: string) {\n const exports = require(path);\n const target = exports.default || exports[name];\n", "DatabaseTable.ts": "@@ -1,9 +1,14 @@\n-import { Dictionary } from '../typings';\n+import { Dictionary, EntityMetadata, EntityProperty } from '../typings';\n+import { EntitySchema } from './EntitySchema';\n+import { Utils } from '../utils';\n+import { ReferenceType } from '../entity';\n+import { NamingStrategy } from '../naming-strategy';\n+import { SchemaHelper } from './SchemaHelper';\n \n export class DatabaseTable {\n \n private columns!: Dictionary<Column>;\n- private indexes!: Dictionary<Index[]>;\n+ private indexes!: Index[];\n private foreignKeys!: Dictionary<ForeignKey>;\n \n constructor(readonly name: string,\n@@ -17,15 +22,34 @@ export class DatabaseTable {\n return this.columns[name];\n }\n \n- init(cols: Column[], indexes: Dictionary<Index[]>, pks: string[], fks: Dictionary<ForeignKey>): void {\n+ getIndexes(): Dictionary<Index[]> {\n+ return this.indexes.reduce((o, index) => {\n+ if (index.primary) {\n+ return o;\n+ }\n+\n+ o[index.keyName] = o[index.keyName] || [];\n+ o[index.keyName].push(index);\n+\n+ return o;\n+ }, {} as Dictionary<Index[]>);\n+ }\n+\n+ init(cols: Column[], indexes: Index[], pks: string[], fks: Dictionary<ForeignKey>): void {\n this.indexes = indexes;\n this.foreignKeys = fks;\n+\n+ const map = this.getIndexes();\n+ Object.keys(map).forEach(key => {\n+ map[key].forEach(index => index.composite = map[key].length > 1);\n+ });\n+\n this.columns = cols.reduce((o, v) => {\n- const index = 
indexes[v.name] || [];\n+ const index = indexes.filter(i => i.columnName === v.name);\n v.primary = pks.includes(v.name);\n v.unique = index.some(i => i.unique && !i.primary);\n v.fk = fks[v.name];\n- v.indexes = index.filter(i => !i.primary);\n+ v.indexes = index.filter(i => !i.primary && !i.composite);\n v.defaultValue = v.defaultValue && v.defaultValue.toString().startsWith('nextval(') ? null : v.defaultValue;\n o[v.name] = v;\n \n@@ -33,6 +57,116 @@ export class DatabaseTable {\n }, {} as any);\n }\n \n+ getEntityDeclaration(namingStrategy: NamingStrategy, schemaHelper: SchemaHelper): EntityMetadata {\n+ const name = namingStrategy.getClassName(this.name, '_');\n+ const schema = new EntitySchema({ name, collection: this.name });\n+ const indexes = this.getIndexes();\n+ const compositeFkIndexes: Dictionary<{ keyName: string }> = {};\n+\n+ Object.keys(indexes)\n+ .filter(name => indexes[name].length > 1)\n+ .forEach(name => {\n+ const properties = indexes[name].map(index => this.getPropertyName(this.getColumn(index.columnName)!));\n+ const index = { name, properties: Utils.unique(properties) };\n+\n+ if (index.properties.length === 1) {\n+ compositeFkIndexes[index.properties[0]] = { keyName: name };\n+ return;\n+ }\n+\n+ if (indexes[index.name][0].unique) {\n+ schema.addUnique(index);\n+ } else {\n+ schema.addIndex(index);\n+ }\n+ });\n+\n+ this.getColumns().forEach(column => this.getPropertyDeclaration(column, namingStrategy, schemaHelper, compositeFkIndexes, schema));\n+\n+ return schema.init().meta;\n+ }\n+\n+ private getPropertyDeclaration(column: Column, namingStrategy: NamingStrategy, schemaHelper: SchemaHelper, compositeFkIndexes: Dictionary<{ keyName: string }>, schema: EntitySchema) {\n+ const reference = this.getReferenceType(column);\n+ const prop = this.getPropertyName(column);\n+ const type = this.getPropertyType(namingStrategy, schemaHelper, column);\n+ const fkOptions: Partial<EntityProperty> = {};\n+ const index = compositeFkIndexes[prop] || column.indexes.find(i => !i.unique);\n+ const unique = column.indexes.find(i => i.unique);\n+\n+ if (column.fk) {\n+ fkOptions.referencedTableName = column.fk.referencedTableName;\n+ fkOptions.referencedColumnNames = [column.fk.referencedColumnName];\n+ fkOptions.onUpdateIntegrity = column.fk.updateRule.toLowerCase();\n+ fkOptions.onDelete = column.fk.deleteRule.toLowerCase();\n+ }\n+\n+ schema.addProperty(prop, type, {\n+ reference,\n+ columnType: column.type,\n+ default: this.getPropertyDefaultValue(schemaHelper, column, type),\n+ nullable: column.nullable,\n+ primary: column.primary,\n+ fieldName: column.name,\n+ length: column.maxLength,\n+ index: index ? index.keyName : undefined,\n+ unique: unique ? 
unique.keyName : undefined,\n+ ...fkOptions,\n+ });\n+ }\n+\n+ private getReferenceType(column: Column): ReferenceType {\n+ if (column.fk && column.unique) {\n+ return ReferenceType.ONE_TO_ONE;\n+ }\n+\n+ if (column.fk) {\n+ return ReferenceType.MANY_TO_ONE;\n+ }\n+\n+ return ReferenceType.SCALAR;\n+ }\n+\n+ private getPropertyName(column: Column): string {\n+ let field = column.name;\n+\n+ if (column.fk) {\n+ field = field.replace(new RegExp(`_${column.fk.referencedColumnName}$`), '');\n+ }\n+\n+ return field.replace(/_(\\w)/g, m => m[1].toUpperCase()).replace(/_+/g, '');\n+ }\n+\n+ private getPropertyType(namingStrategy: NamingStrategy, schemaHelper: SchemaHelper, column: Column, defaultType = 'string'): string {\n+ if (column.fk) {\n+ return namingStrategy.getClassName(column.fk.referencedTableName, '_');\n+ }\n+\n+ return schemaHelper.getTypeFromDefinition(column.type, defaultType);\n+ }\n+\n+ private getPropertyDefaultValue(schemaHelper: SchemaHelper, column: Column, propType: string): any {\n+ if (!column.defaultValue) {\n+ return;\n+ }\n+\n+ const val = schemaHelper.normalizeDefaultValue(column.defaultValue, column.maxLength);\n+\n+ if (column.nullable && val === 'null') {\n+ return;\n+ }\n+\n+ if (propType === 'boolean') {\n+ return !!column.defaultValue;\n+ }\n+\n+ if (propType === 'number') {\n+ return +column.defaultValue;\n+ }\n+\n+ return '' + val;\n+ }\n+\n }\n \n export interface Column {\n@@ -62,4 +196,5 @@ export interface Index {\n keyName: string;\n unique: boolean;\n primary: boolean;\n+ composite?: boolean;\n }\n", "EntityGenerator.ts": "@@ -1,10 +1,10 @@\n-import { CodeBlockWriter, IndentationText, Project, QuoteKind, SourceFile } from 'ts-morph';\n+import { IndentationText, Project, QuoteKind, SourceFile } from 'ts-morph';\n import { ensureDir, writeFile } from 'fs-extra';\n \n-import { AbstractSqlDriver, Configuration, DatabaseSchema, Dictionary, Utils } from '..';\n+import { AbstractSqlDriver, Configuration, DatabaseSchema, Dictionary, ReferenceType, Utils } from '..';\n import { Platform } from '../platforms';\n import { EntityProperty } from '../typings';\n-import { Column, DatabaseTable } from './DatabaseTable';\n+import { DatabaseTable } from './DatabaseTable';\n \n export class EntityGenerator {\n \n@@ -43,120 +43,142 @@ export class EntityGenerator {\n }\n \n async createEntity(table: DatabaseTable): Promise<void> {\n- const properties: [string, string][] = [];\n- const entity = this.project.createSourceFile(this.namingStrategy.getClassName(table.name, '_') + '.ts', writer => {\n- writer.writeLine(`import { Entity, PrimaryKey, Property, ManyToOne, OneToMany, OneToOne, ManyToMany, Cascade } from 'mikro-orm';`);\n+ const meta = table.getEntityDeclaration(this.namingStrategy, this.helper);\n+ const entity = this.project.createSourceFile(meta.className + '.ts', writer => {\n+ writer.writeLine(`import { Entity, PrimaryKey, Property, ManyToOne, OneToMany, OneToOne, ManyToMany, Cascade, Index, Unique } from 'mikro-orm';`);\n writer.blankLine();\n writer.writeLine('@Entity()');\n- writer.write(`export class ${this.namingStrategy.getClassName(table.name, '_')}`);\n- writer.block(() => table.getColumns().forEach(column => this.createProperty(writer, column, properties)));\n+\n+ meta.indexes.forEach(index => {\n+ const properties = Utils.asArray(index.properties).map(prop => `'${prop}'`);\n+ writer.writeLine(`@Index({ name: '${index.name}', properties: [${properties.join(', ')}] })`);\n+ });\n+\n+ meta.uniques.forEach(index => {\n+ const properties = 
Utils.asArray(index.properties).map(prop => `'${prop}'`);\n+ writer.writeLine(`@Unique({ name: '${index.name}', properties: [${properties.join(', ')}] })`);\n+ });\n+\n+ writer.write(`export class ${meta.className}`);\n+ writer.block(() => Object.values(meta.properties).forEach(prop => {\n+ const decorator = this.getPropertyDecorator(prop);\n+ const definition = this.getPropertyDefinition(prop);\n+ writer.blankLineIfLastNot();\n+ writer.writeLine(decorator);\n+ writer.writeLine(definition);\n+ writer.blankLine();\n+ }));\n writer.write('');\n });\n \n this.sources.push(entity);\n }\n \n- createProperty(writer: CodeBlockWriter, column: Column, properties: [string, string][]): void {\n- const prop = this.getPropertyName(column);\n- const type = this.getPropertyType(column);\n- const columnType = this.getPropertyType(column, '__false') === '__false' ? column.type : undefined;\n- const defaultValue = this.getPropertyDefaultValue(column, type);\n- const decorator = this.getPropertyDecorator(prop, column, type, defaultValue, columnType);\n- const definition = this.getPropertyDefinition(column, prop, type, defaultValue);\n-\n- // in case of composite keys in references, we get duplicates, so ignore them here\n- if (properties.find(prop => prop[0] === decorator && prop[1] === definition )) {\n- return;\n- }\n-\n- properties.push([decorator, definition]);\n- writer.blankLineIfLastNot();\n- writer.writeLine(decorator);\n- writer.writeLine(definition);\n- writer.blankLine();\n- }\n-\n- private getPropertyDefinition(column: Column, prop: string, type: string, defaultValue: any): string {\n+ private getPropertyDefinition(prop: EntityProperty): string {\n // string defaults are usually things like SQL functions\n- const useDefault = defaultValue && typeof defaultValue !== 'string';\n- const optional = column.nullable ? '?' : (useDefault ? '' : '!');\n- const ret = `${prop}${optional}: ${type}`;\n+ const useDefault = prop.default && typeof prop.default !== 'string';\n+ const optional = prop.nullable ? '?' : (useDefault ? '' : '!');\n+ const ret = `${prop.name}${optional}: ${prop.type}`;\n \n if (!useDefault) {\n return ret + ';';\n }\n \n- return `${ret} = ${defaultValue};`;\n+ return `${ret} = ${prop.default};`;\n }\n \n- private getPropertyDecorator(prop: string, column: Column, type: string, defaultValue: any, columnType?: string): string {\n- const options = {} as any;\n- const decorator = this.getDecoratorType(column);\n+ private getPropertyDecorator(prop: EntityProperty): string {\n+ const options = {} as Dictionary;\n+ const columnType = this.helper.getTypeFromDefinition(prop.columnTypes[0], '__false') === '__false' ? 
prop.columnTypes[0] : undefined;\n+ let decorator = this.getDecoratorType(prop);\n \n- if (column.fk) {\n- this.getForeignKeyDecoratorOptions(options, column, prop);\n+ if (prop.reference !== ReferenceType.SCALAR) {\n+ this.getForeignKeyDecoratorOptions(options, prop);\n } else {\n- this.getScalarPropertyDecoratorOptions(type, column, options, prop, columnType);\n+ this.getScalarPropertyDecoratorOptions(options, prop, columnType);\n }\n \n- this.getCommonDecoratorOptions(column, options, defaultValue, columnType);\n+ this.getCommonDecoratorOptions(options, prop, columnType);\n+ const indexes = this.getPropertyIndexes(prop, options);\n+ decorator = [...indexes.sort(), decorator].join('\\n');\n \n if (Object.keys(options).length === 0) {\n- return decorator + '()';\n+ return `${decorator}()`;\n }\n \n return `${decorator}({ ${Object.entries(options).map(([opt, val]) => `${opt}: ${val}`).join(', ')} })`;\n }\n \n- private getCommonDecoratorOptions(column: Column, options: Dictionary, defaultValue: any, columnType?: string) {\n+ private getPropertyIndexes(prop: EntityProperty, options: Dictionary): string[] {\n+ if (prop.reference === ReferenceType.SCALAR) {\n+ const ret: string[] = [];\n+\n+ if (prop.index) {\n+ ret.push(`@Index({ name: '${prop.index}' })`);\n+ }\n+\n+ if (prop.unique) {\n+ ret.push(`@Unique({ name: '${prop.unique}' })`);\n+ }\n+\n+ return ret;\n+ }\n+\n+ if (prop.index) {\n+ options.index = `'${prop.index}'`;\n+ }\n+\n+ if (prop.unique) {\n+ options.unique = `'${prop.unique}'`;\n+ }\n+\n+ return [];\n+ }\n+\n+ private getCommonDecoratorOptions(options: Dictionary, prop: EntityProperty, columnType: string | undefined) {\n if (columnType) {\n options.columnType = `'${columnType}'`;\n }\n \n- if (column.nullable) {\n+ if (prop.nullable) {\n options.nullable = true;\n }\n \n- if (defaultValue && typeof defaultValue === 'string') {\n- options.default = `\\`${defaultValue}\\``;\n+ if (prop.default && typeof prop.default === 'string') {\n+ options.default = `\\`${prop.default}\\``;\n }\n }\n \n- private getScalarPropertyDecoratorOptions(type: string, column: Column, options: Dictionary, prop: string, columnType?: string): void {\n- const defaultColumnType = this.helper.getTypeDefinition({\n- type,\n- length: column.maxLength,\n- } as EntityProperty).replace(/\\(\\d+\\)/, '');\n+ private getScalarPropertyDecoratorOptions(options: Dictionary, prop: EntityProperty, columnType: string | undefined): void {\n+ const defaultColumnType = this.helper.getTypeDefinition(prop).replace(/\\(\\d+\\)/, '');\n \n- if (column.type !== defaultColumnType && column.type !== columnType) {\n- options.type = `'${column.type}'`;\n+ if (!columnType && prop.columnTypes[0] !== defaultColumnType && prop.type !== columnType) {\n+ options.columnType = `'${prop.columnTypes[0]}'`;\n }\n \n- if (column.name !== this.namingStrategy.propertyToColumnName(prop)) {\n- options.fieldName = `'${column.name}'`;\n+ if (prop.fieldNames[0] !== this.namingStrategy.propertyToColumnName(prop.name)) {\n+ options.fieldName = `'${prop.fieldNames[0]}'`;\n }\n \n- if (column.maxLength && column.type !== 'enum') {\n- options.length = column.maxLength;\n+ if (prop.length && prop.columnTypes[0] !== 'enum') {\n+ options.length = prop.length;\n }\n }\n \n- private getForeignKeyDecoratorOptions(options: Dictionary, column: Column, prop: string) {\n- options.entity = `() => ${this.namingStrategy.getClassName(column.fk.referencedTableName, '_')}`;\n+ private getForeignKeyDecoratorOptions(options: Dictionary, prop: EntityProperty) {\n+ 
options.entity = `() => ${this.namingStrategy.getClassName(prop.referencedTableName, '_')}`;\n \n- if (column.name !== this.namingStrategy.joinKeyColumnName(prop, column.fk.referencedColumnName)) {\n- options.fieldName = `'${column.name}'`;\n+ if (prop.fieldNames[0] !== this.namingStrategy.joinKeyColumnName(prop.name, prop.referencedColumnNames[0])) {\n+ options.fieldName = `'${prop.fieldNames[0]}'`;\n }\n \n const cascade = ['Cascade.MERGE'];\n- const onUpdate = column.fk.updateRule.toLowerCase();\n- const onDelete = column.fk.deleteRule.toLowerCase();\n \n- if (onUpdate === 'cascade') {\n+ if (prop.onUpdateIntegrity === 'cascade') {\n cascade.push('Cascade.PERSIST');\n }\n \n- if (onDelete === 'cascade') {\n+ if (prop.onDelete === 'cascade') {\n cascade.push('Cascade.REMOVE');\n }\n \n@@ -169,65 +191,25 @@ export class EntityGenerator {\n options.cascade = `[${cascade.sort().join(', ')}]`;\n }\n \n- if (column.primary) {\n+ if (prop.primary) {\n options.primary = true;\n }\n }\n \n- private getDecoratorType(column: Column): string {\n- if (column.fk && column.unique) {\n+ private getDecoratorType(prop: EntityProperty): string {\n+ if (prop.reference === ReferenceType.ONE_TO_ONE) {\n return '@OneToOne';\n }\n \n- if (column.fk) {\n+ if (prop.reference === ReferenceType.MANY_TO_ONE) {\n return '@ManyToOne';\n }\n \n- if (column.primary) {\n+ if (prop.primary) {\n return '@PrimaryKey';\n }\n \n return '@Property';\n }\n \n- private getPropertyName(column: Column): string {\n- let field = column.name;\n-\n- if (column.fk) {\n- field = field.replace(new RegExp(`_${column.fk.referencedColumnName}$`), '');\n- }\n-\n- return field.replace(/_(\\w)/g, m => m[1].toUpperCase()).replace(/_+/g, '');\n- }\n-\n- private getPropertyType(column: Column, defaultType = 'string'): string {\n- if (column.fk) {\n- return this.namingStrategy.getClassName(column.fk.referencedTableName, '_');\n- }\n-\n- return this.helper.getTypeFromDefinition(column.type, defaultType);\n- }\n-\n- private getPropertyDefaultValue(column: Column, propType: string): any {\n- if (!column.defaultValue) {\n- return;\n- }\n-\n- const val = this.helper.normalizeDefaultValue(column.defaultValue, column.maxLength);\n-\n- if (column.nullable && val === 'null') {\n- return;\n- }\n-\n- if (propType === 'boolean') {\n- return !!column.defaultValue;\n- }\n-\n- if (propType === 'number') {\n- return +column.defaultValue;\n- }\n-\n- return '' + val;\n- }\n-\n }\n", "EntitySchema.ts": "@@ -110,8 +110,8 @@ export class EntitySchema<T extends AnyEntity<T> = AnyEntity, U extends AnyEntit\n addOneToOne<K = object>(name: string & keyof T, type: TypeType, options: OneToOneOptions<K>): void {\n const prop = { reference: ReferenceType.ONE_TO_ONE, cascade: [Cascade.PERSIST, Cascade.MERGE], ...options };\n Utils.defaultValue(prop, 'nullable', prop.cascade.includes(Cascade.REMOVE) || prop.cascade.includes(Cascade.ALL));\n- prop.owner = prop.owner || !!prop.inversedBy || !prop.mappedBy;\n- prop.unique = prop.owner;\n+ Utils.defaultValue(prop, 'owner', !!prop.inversedBy || !prop.mappedBy);\n+ Utils.defaultValue(prop, 'unique', prop.owner);\n \n if (prop.owner && options.mappedBy) {\n Utils.renameKey(prop, 'mappedBy', 'inversedBy');\n", "MySqlSchemaHelper.ts": "@@ -1,8 +1,8 @@\n import { CreateTableBuilder } from 'knex';\n import { IsSame, SchemaHelper } from './SchemaHelper';\n-import { Dictionary, EntityProperty } from '../typings';\n+import { EntityProperty } from '../typings';\n import { AbstractSqlConnection } from 
'../connections/AbstractSqlConnection';\n-import { Column } from './DatabaseTable';\n+import { Column, Index } from './DatabaseTable';\n \n export class MySqlSchemaHelper extends SchemaHelper {\n \n@@ -87,21 +87,16 @@ export class MySqlSchemaHelper extends SchemaHelper {\n }));\n }\n \n- async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<Dictionary<any[]>> {\n+ async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<Index[]> {\n const sql = `show index from \\`${tableName}\\``;\n const indexes = await connection.execute<any[]>(sql);\n \n- return indexes.reduce((ret, index: any) => {\n- ret[index.Column_name] = ret[index.Column_name] || [];\n- ret[index.Column_name].push({\n- columnName: index.Column_name,\n- keyName: index.Key_name,\n- unique: !index.Non_unique,\n- primary: index.Key_name === 'PRIMARY',\n- });\n-\n- return ret;\n- }, {});\n+ return indexes.map(index => ({\n+ columnName: index.Column_name,\n+ keyName: index.Key_name,\n+ unique: !index.Non_unique,\n+ primary: index.Key_name === 'PRIMARY',\n+ }));\n }\n \n isSame(prop: EntityProperty, column: Column, idx?: number): IsSame {\n", "PostgreSqlSchemaHelper.ts": "@@ -1,7 +1,7 @@\n import { IsSame, SchemaHelper } from './SchemaHelper';\n-import { Dictionary, EntityProperty } from '../typings';\n+import { EntityProperty } from '../typings';\n import { AbstractSqlConnection } from '../connections/AbstractSqlConnection';\n-import { Column } from './DatabaseTable';\n+import { Column, Index } from './DatabaseTable';\n \n export class PostgreSqlSchemaHelper extends SchemaHelper {\n \n@@ -81,21 +81,16 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {\n }));\n }\n \n- async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName: string): Promise<Dictionary<any[]>> {\n+ async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName: string): Promise<Index[]> {\n const sql = this.getIndexesSQL(tableName, schemaName);\n const indexes = await connection.execute<any[]>(sql);\n \n- return indexes.reduce((ret, index: any) => {\n- ret[index.column_name] = ret[index.column_name] || [];\n- ret[index.column_name].push({\n- columnName: index.column_name,\n- keyName: index.constraint_name,\n- unique: index.unique,\n- primary: index.primary,\n- });\n-\n- return ret;\n- }, {});\n+ return indexes.map(index => ({\n+ columnName: index.column_name,\n+ keyName: index.constraint_name,\n+ unique: index.unique,\n+ primary: index.primary,\n+ }));\n }\n \n getForeignKeysSQL(tableName: string, schemaName: string): string {\n@@ -147,13 +142,11 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {\n }\n \n private getIndexesSQL(tableName: string, schemaName: string): string {\n- return `select i.indexname as constraint_name, k.column_name, c.contype = 'u' as unique, c.contype = 'p' as primary\n- from pg_catalog.pg_indexes i\n- join pg_catalog.pg_constraint c on c.conname = i.indexname\n- join pg_catalog.pg_class rel on rel.oid = c.conrelid\n- join pg_catalog.pg_namespace nsp on nsp.oid = c.connamespace\n- join information_schema.key_column_usage k on k.constraint_name = c.conname and k.table_schema = 'public' and k.table_name = '${tableName}'\n- where nsp.nspname = '${schemaName}' and rel.relname = '${tableName}'`;\n+ return `select relname as constraint_name, attname as column_name, idx.indisunique as unique, idx.indisprimary as primary\n+ from pg_index idx\n+ left join pg_class AS i on i.oid = idx.indexrelid\n+ 
left join pg_attribute a on a.attrelid = idx.indrelid and a.attnum = ANY(idx.indkey) and a.attnum > 0\n+ where indrelid = '${schemaName}.${tableName}'::regclass`;\n }\n \n }\n", "SchemaGenerator.ts": "@@ -178,12 +178,14 @@ export class SchemaGenerator {\n \n meta.indexes.forEach(index => {\n const properties = Utils.flatten(Utils.asArray(index.properties).map(prop => meta.properties[prop].fieldNames));\n- table.index(properties, index.name, index.type);\n+ const indexName = index.name || this.helper.getIndexName(meta.collection, properties, false);\n+ table.index(properties, indexName, index.type);\n });\n \n meta.uniques.forEach(index => {\n const properties = Utils.flatten(Utils.asArray(index.properties).map(prop => meta.properties[prop].fieldNames));\n- table.unique(properties, index.name);\n+ const indexName = index.name || this.helper.getIndexName(meta.collection, properties, true);\n+ table.unique(properties, indexName);\n });\n \n this.helper.finalizeTable(table);\n@@ -296,7 +298,7 @@ export class SchemaGenerator {\n \n return meta2.primaryKeys.map((pk, idx) => {\n const col = table.specificType(prop.joinColumns[idx], meta2.properties[pk].columnTypes[0]);\n- return this.configureColumn(meta, prop, col, meta2.properties[pk], alter);\n+ return this.configureColumn(meta, prop, col, prop.joinColumns[idx], meta2.properties[pk], alter);\n });\n }\n \n@@ -311,11 +313,11 @@ export class SchemaGenerator {\n \n if (prop.enum && prop.items && prop.items.every(item => Utils.isString(item))) {\n const col = table.enum(prop.fieldNames[0], prop.items!);\n- return this.configureColumn(meta, prop, col, undefined, alter);\n+ return this.configureColumn(meta, prop, col, prop.fieldNames[0], undefined, alter);\n }\n \n const col = table.specificType(prop.fieldNames[0], prop.columnTypes[0]);\n- return this.configureColumn(meta, prop, col, undefined, alter);\n+ return this.configureColumn(meta, prop, col, prop.fieldNames[0], undefined, alter);\n }\n \n private updateTableColumn(table: TableBuilder, meta: EntityMetadata, prop: EntityProperty, column: Column, diff: IsSame): void {\n@@ -334,7 +336,6 @@ export class SchemaGenerator {\n }\n \n this.createTableColumn(table, meta, prop, diff).map(col => col.alter());\n- // this.createSimpleTableColumn(table, meta, prop, diff).alter();\n }\n \n private dropTableColumn(table: TableBuilder, column: Column): void {\n@@ -346,12 +347,12 @@ export class SchemaGenerator {\n table.dropColumn(column.name);\n }\n \n- private configureColumn(meta: EntityMetadata, prop: EntityProperty, col: ColumnBuilder, pkProp = prop, alter?: IsSame) {\n+ private configureColumn<T>(meta: EntityMetadata<T>, prop: EntityProperty<T>, col: ColumnBuilder, columnName: string, pkProp = prop, alter?: IsSame) {\n const nullable = (alter && this.platform.requiresNullableForAlteringColumn()) || prop.nullable!;\n const indexed = 'index' in prop ? prop.index : (prop.reference !== ReferenceType.SCALAR && this.helper.indexForeignKeys());\n const index = (indexed || (prop.primary && meta.compositePK)) && !(alter && alter.sameIndex);\n- const indexName = Utils.isString(prop.index) ? prop.index : undefined;\n- const uniqueName = Utils.isString(prop.unique) ? 
prop.unique : undefined;\n+ const indexName = this.getIndexName(meta, prop, false, columnName);\n+ const uniqueName = this.getIndexName(meta, prop, true, columnName);\n const hasDefault = typeof prop.default !== 'undefined'; // support falsy default values like `0`, `false` or empty string\n \n Utils.runIfNotEmpty(() => col.nullable(), nullable);\n@@ -365,6 +366,17 @@ export class SchemaGenerator {\n return col;\n }\n \n+ private getIndexName<T>(meta: EntityMetadata<T>, prop: EntityProperty<T>, unique: boolean, columnName: string): string {\n+ const type = unique ? 'unique' : 'index';\n+ const value = prop[type];\n+\n+ if (Utils.isString(value)) {\n+ return value;\n+ }\n+\n+ return this.helper.getIndexName(meta.collection, [columnName], unique);\n+ }\n+\n private createForeignKeys(table: TableBuilder, meta: EntityMetadata): void {\n Object.values(meta.properties)\n .filter(prop => prop.reference === ReferenceType.MANY_TO_ONE || (prop.reference === ReferenceType.ONE_TO_ONE && prop.owner))\n", "SchemaHelper.ts": "@@ -64,15 +64,8 @@ export abstract class SchemaHelper {\n return found || defaultType;\n }\n \n- async getPrimaryKeys(connection: AbstractSqlConnection, indexes: Dictionary<Index[]>, tableName: string, schemaName?: string): Promise<string[]> {\n- const ret = [];\n-\n- for (const idx of Object.values(indexes)) {\n- const pks = idx.filter(i => i.primary).map(i => i.columnName);\n- ret.push(...pks);\n- }\n-\n- return ret;\n+ async getPrimaryKeys(connection: AbstractSqlConnection, indexes: Index[], tableName: string, schemaName?: string): Promise<string[]> {\n+ return indexes.filter(i => i.primary).map(i => i.columnName);\n }\n \n async getForeignKeys(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<Dictionary> {\n@@ -92,7 +85,7 @@ export abstract class SchemaHelper {\n throw new Error('Not supported by given driver');\n }\n \n- async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<Dictionary<any[]>> {\n+ async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<Index[]> {\n throw new Error('Not supported by given driver');\n }\n \n@@ -100,6 +93,14 @@ export abstract class SchemaHelper {\n throw new Error('Not supported by given driver');\n }\n \n+ /**\n+ * Returns the default name of index for the given columns\n+ */\n+ getIndexName(tableName: string, columns: string[], unique: boolean): string {\n+ const type = unique ? 
'unique' : 'index';\n+ return `${tableName}_${columns.join('_')}_${type}`;\n+ }\n+\n mapForeignKeys(fks: any[]): Dictionary {\n return fks.reduce((ret, fk: any) => {\n ret[fk.column_name] = {\n", "SqliteSchemaHelper.ts": "@@ -1,7 +1,7 @@\n import { IsSame, SchemaHelper } from './SchemaHelper';\n import { Dictionary, EntityProperty } from '../typings';\n import { AbstractSqlConnection } from '../connections/AbstractSqlConnection';\n-import { Column } from './DatabaseTable';\n+import { Column, Index } from './DatabaseTable';\n import { Connection } from '../connections';\n \n export class SqliteSchemaHelper extends SchemaHelper {\n@@ -68,7 +68,7 @@ export class SqliteSchemaHelper extends SchemaHelper {\n return cols.filter(col => !!col.pk).map(col => col.name);\n }\n \n- async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<Dictionary<any[]>> {\n+ async getIndexes(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<Index[]> {\n const indexes = await connection.execute<any[]>(`pragma index_list(\\`${tableName}\\`)`);\n \n for (const index of indexes) {\n@@ -76,16 +76,12 @@ export class SqliteSchemaHelper extends SchemaHelper {\n index.column_name = res[0].name;\n }\n \n- return indexes.reduce((ret, index) => {\n- ret[index.column_name] = ret[index.column_name] || [];\n- ret[index.column_name].push({\n- columnName: index.column_name,\n- keyName: index.name,\n- unique: !!index.unique,\n- });\n-\n- return ret;\n- }, {});\n+ return indexes.map(index => ({\n+ columnName: index.column_name,\n+ keyName: index.name,\n+ unique: !!index.unique,\n+ primary: false,\n+ }));\n }\n \n getRenameColumnSQL(tableName: string, from: Column, to: EntityProperty, idx = 0): string {\n", "EntityGenerator.test.ts": "@@ -3,6 +3,7 @@ import { initORMMySql, initORMPostgreSql, initORMSqlite } from './bootstrap';\n import { EntityGenerator } from '../lib/schema/EntityGenerator';\n import { MongoDriver } from '../lib/drivers/MongoDriver';\n import { Configuration, MikroORM } from '../lib';\n+import { DatabaseTable } from '../lib/schema/DatabaseTable';\n \n describe('EntityGenerator', () => {\n \n@@ -34,10 +35,37 @@ describe('EntityGenerator', () => {\n const dump = await generator.generate();\n expect(dump).toMatchSnapshot('postgres-entity-dump');\n \n- const writer = { writeLine: jest.fn(), blankLineIfLastNot: jest.fn(), blankLine: jest.fn(), block: jest.fn(), write: jest.fn() };\n- generator.createProperty(writer as any, { name: 'test', type: 'varchar(50)', defaultValue: 'null::character varying', nullable: true } as any, []);\n- expect(writer.writeLine.mock.calls.length).toBe(2);\n- expect(writer.writeLine.mock.calls[0][0]).toBe(`@Property({ type: 'varchar(50)', nullable: true })`);\n+ const table = new DatabaseTable('test_entity', 'public');\n+ Object.assign(table, {\n+ indexes: [],\n+ columns: {\n+ name: {\n+ name: 'name',\n+ type: 'varchar(50)',\n+ maxLength: 50,\n+ nullable: true,\n+ defaultValue: 'null::character varying',\n+ indexes: [],\n+ },\n+ test: {\n+ name: 'test',\n+ type: 'varchar(50)',\n+ maxLength: 50,\n+ nullable: true,\n+ defaultValue: 'foo',\n+ indexes: [],\n+ },\n+ },\n+ });\n+\n+ const helper = orm.em.getDriver().getPlatform().getSchemaHelper()!;\n+ const meta = table.getEntityDeclaration(orm.config.getNamingStrategy(), helper);\n+ expect(meta.properties.name.default).toBeUndefined();\n+ expect(meta.properties.name.nullable).toBe(true);\n+ expect(meta.properties.name.columnTypes[0]).toBe('varchar(50)');\n+ 
expect(meta.properties.test.default).toBe('foo');\n+ expect(meta.properties.test.nullable).toBe(true);\n+ expect(meta.properties.test.columnTypes[0]).toBe('varchar(50)');\n \n await orm.close(true);\n });\n", "EntityGenerator.test.ts.snap": "@@ -8,7 +8,7 @@ import { Author2 } from './Author2';\n @Entity()\n export class Address2 {\n \n- @OneToOne({ entity: () => Author2, cascade: [Cascade.ALL], primary: true })\n+ @OneToOne({ entity: () => Author2, cascade: [Cascade.ALL], primary: true, index: 'address2_author_id_index', unique: 'address2_author_id_unique' })\n author!: Author2;\n \n @Property({ length: 255 })\n@@ -16,10 +16,12 @@ export class Address2 {\n \n }\n \",\n- \"import { Cascade, Entity, ManyToOne, PrimaryKey, Property } from 'mikro-orm';\n+ \"import { Cascade, Entity, Index, ManyToOne, PrimaryKey, Property, Unique } from 'mikro-orm';\n import { Book2 } from './Book2';\n \n @Entity()\n+@Index({ name: 'author2_name_age_index', properties: ['name', 'age'] })\n+@Unique({ name: 'author2_name_email_unique', properties: ['name', 'email'] })\n export class Author2 {\n \n @PrimaryKey()\n@@ -31,15 +33,19 @@ export class Author2 {\n @Property({ length: 3, default: \\`current_timestamp(3)\\` })\n updatedAt!: Date;\n \n+ @Index({ name: 'custom_idx_name_123' })\n @Property({ length: 255 })\n name!: string;\n \n+ @Index({ name: 'custom_email_index_name' })\n+ @Unique({ name: 'custom_email_unique_name' })\n @Property({ length: 255 })\n email!: string;\n \n @Property({ nullable: true })\n age?: number;\n \n+ @Index({ name: 'author2_terms_accepted_index' })\n @Property()\n termsAccepted: boolean = true;\n \n@@ -49,16 +55,18 @@ export class Author2 {\n @Property({ nullable: true })\n identities?: object;\n \n+ @Index({ name: 'author2_born_index' })\n @Property({ columnType: 'date', nullable: true })\n born?: string;\n \n+ @Index({ name: 'born_time_idx' })\n @Property({ columnType: 'time', nullable: true })\n bornTime?: string;\n \n- @ManyToOne({ entity: () => Book2, cascade: [Cascade.MERGE, Cascade.REMOVE], nullable: true })\n+ @ManyToOne({ entity: () => Book2, cascade: [Cascade.MERGE, Cascade.REMOVE], nullable: true, index: 'author2_favourite_book_uuid_pk_index' })\n favouriteBook?: Book2;\n \n- @ManyToOne({ entity: () => Author2, nullable: true })\n+ @ManyToOne({ entity: () => Author2, nullable: true, index: 'author2_favourite_author_id_index' })\n favouriteAuthor?: Author2;\n \n }\n@@ -69,10 +77,10 @@ import { Author2 } from './Author2';\n @Entity()\n export class Author2ToAuthor2 {\n \n- @ManyToOne({ entity: () => Author2, fieldName: 'author2_1_id', cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => Author2, fieldName: 'author2_1_id', cascade: [Cascade.ALL], primary: true, index: 'author2_to_author2_author2_1_id_index' })\n author21!: Author2;\n \n- @ManyToOne({ entity: () => Author2, fieldName: 'author2_2_id', cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => Author2, fieldName: 'author2_2_id', cascade: [Cascade.ALL], primary: true, index: 'author2_to_author2_author2_2_id_index' })\n author22!: Author2;\n \n }\n@@ -83,10 +91,10 @@ import { Author2 } from './Author2';\n @Entity()\n export class AuthorToFriend {\n \n- @ManyToOne({ entity: () => Author2, fieldName: 'author2_1_id', cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => Author2, fieldName: 'author2_1_id', cascade: [Cascade.ALL], primary: true, index: 'author_to_friend_author2_1_id_index' })\n author21!: Author2;\n \n- @ManyToOne({ entity: () => Author2, fieldName: 
'author2_2_id', cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => Author2, fieldName: 'author2_2_id', cascade: [Cascade.ALL], primary: true, index: 'author_to_friend_author2_2_id_index' })\n author22!: Author2;\n \n }\n@@ -107,22 +115,22 @@ export class Book2 {\n @Property({ length: 255, nullable: true })\n title?: string;\n \n- @Property({ type: 'text', length: 65535, nullable: true })\n+ @Property({ columnType: 'text', length: 65535, nullable: true })\n perex?: string;\n \n- @Property({ type: 'float', nullable: true })\n+ @Property({ columnType: 'float', nullable: true })\n price?: number;\n \n- @Property({ type: 'double', nullable: true })\n+ @Property({ columnType: 'double', nullable: true })\n double?: number;\n \n @Property({ nullable: true })\n meta?: object;\n \n- @ManyToOne({ entity: () => Author2, cascade: [Cascade.MERGE] })\n+ @ManyToOne({ entity: () => Author2, cascade: [Cascade.MERGE], index: 'book2_author_id_index' })\n author!: Author2;\n \n- @ManyToOne({ entity: () => Publisher2, cascade: [Cascade.ALL], nullable: true })\n+ @ManyToOne({ entity: () => Publisher2, cascade: [Cascade.ALL], nullable: true, index: 'book2_publisher_id_index' })\n publisher?: Publisher2;\n \n @Property({ length: 255, nullable: true })\n@@ -140,10 +148,10 @@ export class Book2ToBookTag2 {\n @PrimaryKey()\n order!: number;\n \n- @ManyToOne({ entity: () => Book2, cascade: [Cascade.ALL] })\n+ @ManyToOne({ entity: () => Book2, cascade: [Cascade.ALL], index: 'book2_to_book_tag2_book2_uuid_pk_index' })\n book2!: Book2;\n \n- @ManyToOne({ entity: () => BookTag2, cascade: [Cascade.ALL] })\n+ @ManyToOne({ entity: () => BookTag2, cascade: [Cascade.ALL], index: 'book2_to_book_tag2_book_tag2_id_index' })\n bookTag2!: BookTag2;\n \n }\n@@ -153,7 +161,7 @@ export class Book2ToBookTag2 {\n @Entity()\n export class BookTag2 {\n \n- @PrimaryKey({ type: 'bigint' })\n+ @PrimaryKey({ columnType: 'bigint' })\n id!: string;\n \n @Property({ length: 50 })\n@@ -168,22 +176,24 @@ import { BookTag2 } from './BookTag2';\n @Entity()\n export class BookToTagUnordered {\n \n- @ManyToOne({ entity: () => Book2, cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => Book2, cascade: [Cascade.ALL], primary: true, index: 'book_to_tag_unordered_book2_uuid_pk_index' })\n book2!: Book2;\n \n- @ManyToOne({ entity: () => BookTag2, cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => BookTag2, cascade: [Cascade.ALL], primary: true, index: 'book_to_tag_unordered_book_tag2_id_index' })\n bookTag2!: BookTag2;\n \n }\n \",\n- \"import { Entity, PrimaryKey, Property } from 'mikro-orm';\n+ \"import { Entity, Index, PrimaryKey, Property } from 'mikro-orm';\n \n @Entity()\n export class Car2 {\n \n+ @Index({ name: 'car2_name_index' })\n @PrimaryKey({ length: 255 })\n name!: string;\n \n+ @Index({ name: 'car2_year_index' })\n @PrimaryKey()\n year!: number;\n \n@@ -204,21 +214,22 @@ export class CarOwner2 {\n @Property({ length: 255 })\n name!: string;\n \n- @ManyToOne({ entity: () => Car2 })\n+ @ManyToOne({ entity: () => Car2, index: 'car_owner2_car_name_car_year_idx' })\n car!: Car2;\n \n }\n \",\n- \"import { Entity, ManyToOne, PrimaryKey, Property } from 'mikro-orm';\n+ \"import { Entity, Index, ManyToOne, PrimaryKey, Property } from 'mikro-orm';\n import { Test2 } from './Test2';\n \n @Entity()\n export class Configuration2 {\n \n+ @Index({ name: 'configuration2_property_index' })\n @PrimaryKey({ length: 255 })\n property!: string;\n \n- @ManyToOne({ entity: () => Test2, primary: true })\n+ 
@ManyToOne({ entity: () => Test2, primary: true, index: 'configuration2_test_id_index' })\n test!: Test2;\n \n @Property({ length: 255 })\n@@ -238,10 +249,10 @@ export class FooBar2 {\n @Property({ length: 255 })\n name!: string;\n \n- @OneToOne({ entity: () => FooBaz2, nullable: true })\n+ @OneToOne({ entity: () => FooBaz2, nullable: true, index: 'foo_bar2_baz_id_index', unique: 'foo_bar2_baz_id_unique' })\n baz?: FooBaz2;\n \n- @OneToOne({ entity: () => FooBar2, nullable: true })\n+ @OneToOne({ entity: () => FooBar2, nullable: true, index: 'foo_bar2_foo_bar_id_index', unique: 'foo_bar2_foo_bar_id_unique' })\n fooBar?: FooBar2;\n \n @Property({ default: \\`CURRENT_TIMESTAMP\\` })\n@@ -272,10 +283,10 @@ import { FooBaz2 } from './FooBaz2';\n @Entity()\n export class FooParam2 {\n \n- @ManyToOne({ entity: () => FooBar2, primary: true })\n+ @ManyToOne({ entity: () => FooBar2, primary: true, index: 'foo_param2_bar_id_index' })\n bar!: FooBar2;\n \n- @ManyToOne({ entity: () => FooBaz2, primary: true })\n+ @ManyToOne({ entity: () => FooBaz2, primary: true, index: 'foo_param2_baz_id_index' })\n baz!: FooBaz2;\n \n @Property({ length: 255 })\n@@ -294,10 +305,10 @@ export class Publisher2 {\n @Property({ length: 255 })\n name!: string;\n \n- @Property({ type: 'enum' })\n+ @Property({ columnType: 'enum' })\n type!: string;\n \n- @Property({ type: 'enum' })\n+ @Property({ columnType: 'enum' })\n type2!: string;\n \n @Property({ nullable: true })\n@@ -309,7 +320,7 @@ export class Publisher2 {\n @Property({ nullable: true })\n enum3?: boolean;\n \n- @Property({ type: 'enum', nullable: true })\n+ @Property({ columnType: 'enum', nullable: true })\n enum4?: string;\n \n }\n@@ -324,10 +335,10 @@ export class Publisher2ToTest2 {\n @PrimaryKey()\n id!: number;\n \n- @ManyToOne({ entity: () => Publisher2, cascade: [Cascade.ALL] })\n+ @ManyToOne({ entity: () => Publisher2, cascade: [Cascade.ALL], index: 'publisher2_to_test2_publisher2_id_index' })\n publisher2!: Publisher2;\n \n- @ManyToOne({ entity: () => Test2, cascade: [Cascade.ALL] })\n+ @ManyToOne({ entity: () => Test2, cascade: [Cascade.ALL], index: 'publisher2_to_test2_test2_id_index' })\n test2!: Test2;\n \n }\n@@ -345,13 +356,13 @@ export class Test2 {\n @Property({ length: 255, nullable: true })\n name?: string;\n \n- @OneToOne({ entity: () => Book2, cascade: [Cascade.MERGE], nullable: true })\n+ @OneToOne({ entity: () => Book2, cascade: [Cascade.MERGE], nullable: true, index: 'test2_book_uuid_pk_index', unique: 'test2_book_uuid_pk_unique' })\n book?: Book2;\n \n @Property()\n version: number = 1;\n \n- @OneToOne({ entity: () => FooBar2, fieldName: 'foo___bar', nullable: true })\n+ @OneToOne({ entity: () => FooBar2, fieldName: 'foo___bar', nullable: true, index: 'test2_foo___bar_index', unique: 'test2_foo___bar_unique' })\n fooBar?: FooBar2;\n \n @Property({ fieldName: 'foo___baz', nullable: true })\n@@ -359,14 +370,16 @@ export class Test2 {\n \n }\n \",\n- \"import { Entity, PrimaryKey, Property } from 'mikro-orm';\n+ \"import { Entity, Index, PrimaryKey, Property } from 'mikro-orm';\n \n @Entity()\n export class User2 {\n \n+ @Index({ name: 'user2_first_name_index' })\n @PrimaryKey({ length: 100 })\n firstName!: string;\n \n+ @Index({ name: 'user2_last_name_index' })\n @PrimaryKey({ length: 100 })\n lastName!: string;\n \n@@ -382,10 +395,10 @@ import { User2 } from './User2';\n @Entity()\n export class User2ToCar2 {\n \n- @ManyToOne({ entity: () => User2, cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => User2, cascade: 
[Cascade.ALL], primary: true, index: 'user2_to_car2_user2_first_name_user2_last_name_index' })\n user2!: User2;\n \n- @ManyToOne({ entity: () => Car2, cascade: [Cascade.ALL], primary: true })\n+ @ManyToOne({ entity: () => Car2, cascade: [Cascade.ALL], primary: true, index: 'user2_to_car2_car2_name_car2_year_index' })\n car2!: Car2;\n \n }\n@@ -401,7 +414,7 @@ import { Author2 } from './Author2';\n @Entity()\n export class Address2 {\n \n- @OneToOne({ entity: () => Author2, cascade: [Cascade.ALL], primary: true })\n+ @OneToOne({ entity: () => Author2, cascade: [Cascade.ALL], primary: true, unique: 'address2_author_id_unique' })\n author!: Author2;\n \n @Property({ length: 255 })\n@@ -409,10 +422,12 @@ export class Address2 {\n \n }\n \",\n- \"import { Cascade, Entity, ManyToOne, PrimaryKey, Property } from 'mikro-orm';\n+ \"import { Cascade, Entity, Index, ManyToOne, PrimaryKey, Property, Unique } from 'mikro-orm';\n import { Book2 } from './Book2';\n \n @Entity()\n+@Index({ name: 'author2_name_age_index', properties: ['name', 'age'] })\n+@Unique({ name: 'author2_name_email_unique', properties: ['name', 'email'] })\n export class Author2 {\n \n @PrimaryKey()\n@@ -424,15 +439,19 @@ export class Author2 {\n @Property({ length: 3, default: \\`current_timestamp(3)\\` })\n updatedAt!: Date;\n \n+ @Index({ name: 'custom_idx_name_123' })\n @Property({ length: 255 })\n name!: string;\n \n+ @Index({ name: 'custom_email_index_name' })\n+ @Unique({ name: 'custom_email_unique_name' })\n @Property({ length: 255 })\n email!: string;\n \n @Property({ nullable: true })\n age?: number;\n \n+ @Index({ name: 'author2_terms_accepted_index' })\n @Property()\n termsAccepted: boolean = true;\n \n@@ -442,10 +461,12 @@ export class Author2 {\n @Property({ nullable: true })\n identities?: object;\n \n- @Property({ type: 'date', nullable: true })\n+ @Index({ name: 'author2_born_index' })\n+ @Property({ columnType: 'date', nullable: true })\n born?: Date;\n \n- @Property({ type: 'time', nullable: true })\n+ @Index({ name: 'born_time_idx' })\n+ @Property({ columnType: 'time', nullable: true })\n bornTime?: Date;\n \n @ManyToOne({ entity: () => Book2, cascade: [Cascade.MERGE, Cascade.REMOVE], nullable: true })\n@@ -500,13 +521,13 @@ export class Book2 {\n @Property({ length: 255, nullable: true })\n title?: string;\n \n- @Property({ type: 'text', nullable: true })\n+ @Property({ columnType: 'text', nullable: true })\n perex?: string;\n \n- @Property({ type: 'float8', nullable: true })\n+ @Property({ columnType: 'float8', nullable: true })\n price?: number;\n \n- @Property({ type: 'numeric', nullable: true })\n+ @Property({ columnType: 'numeric', nullable: true })\n double?: number;\n \n @Property({ nullable: true })\n@@ -546,7 +567,7 @@ export class Book2ToBookTag2 {\n @Entity()\n export class BookTag2 {\n \n- @PrimaryKey({ type: 'int8' })\n+ @PrimaryKey({ columnType: 'int8' })\n id!: string;\n \n @Property({ length: 50 })\n@@ -569,16 +590,17 @@ export class BookToTagUnordered {\n \n }\n \",\n- \"import { Entity, ManyToOne, PrimaryKey, Property } from 'mikro-orm';\n+ \"import { Entity, Index, ManyToOne, PrimaryKey, Property } from 'mikro-orm';\n import { Test2 } from './Test2';\n \n @Entity()\n export class Configuration2 {\n \n+ @Index({ name: 'configuration2_property_index' })\n @PrimaryKey({ length: 255 })\n property!: string;\n \n- @ManyToOne({ entity: () => Test2, primary: true })\n+ @ManyToOne({ entity: () => Test2, primary: true, index: 'configuration2_test_id_index' })\n test!: Test2;\n \n @Property({ length: 
255 })\n@@ -598,10 +620,10 @@ export class FooBar2 {\n @Property({ length: 255 })\n name!: string;\n \n- @OneToOne({ entity: () => FooBaz2, nullable: true })\n+ @OneToOne({ entity: () => FooBaz2, nullable: true, unique: 'foo_bar2_baz_id_unique' })\n baz?: FooBaz2;\n \n- @OneToOne({ entity: () => FooBar2, nullable: true })\n+ @OneToOne({ entity: () => FooBar2, nullable: true, unique: 'foo_bar2_foo_bar_id_unique' })\n fooBar?: FooBar2;\n \n @Property({ default: \\`current_timestamp(0)\\` })\n@@ -632,10 +654,10 @@ import { FooBaz2 } from './FooBaz2';\n @Entity()\n export class FooParam2 {\n \n- @ManyToOne({ entity: () => FooBar2, primary: true })\n+ @ManyToOne({ entity: () => FooBar2, primary: true, index: 'foo_param2_bar_id_index' })\n bar!: FooBar2;\n \n- @ManyToOne({ entity: () => FooBaz2, primary: true })\n+ @ManyToOne({ entity: () => FooBaz2, primary: true, index: 'foo_param2_baz_id_index' })\n baz!: FooBaz2;\n \n @Property({ length: 255 })\n@@ -648,7 +670,7 @@ export class FooParam2 {\n @Entity()\n export class Label2 {\n \n- @PrimaryKey({ type: 'uuid' })\n+ @PrimaryKey({ columnType: 'uuid' })\n uuid!: string;\n \n @Property({ length: 255 })\n@@ -667,22 +689,22 @@ export class Publisher2 {\n @Property({ length: 255 })\n name!: string;\n \n- @Property({ type: 'text' })\n+ @Property({ columnType: 'text' })\n type!: string;\n \n- @Property({ type: 'text' })\n+ @Property({ columnType: 'text' })\n type2!: string;\n \n- @Property({ type: 'int2', nullable: true })\n+ @Property({ columnType: 'int2', nullable: true })\n enum1?: number;\n \n- @Property({ type: 'int2', nullable: true })\n+ @Property({ columnType: 'int2', nullable: true })\n enum2?: number;\n \n- @Property({ type: 'int2', nullable: true })\n+ @Property({ columnType: 'int2', nullable: true })\n enum3?: number;\n \n- @Property({ type: 'text', nullable: true })\n+ @Property({ columnType: 'text', nullable: true })\n enum4?: string;\n \n }\n@@ -717,7 +739,7 @@ export class Test2 {\n @Property({ length: 255, nullable: true })\n name?: string;\n \n- @OneToOne({ entity: () => Book2, cascade: [Cascade.MERGE], nullable: true })\n+ @OneToOne({ entity: () => Book2, cascade: [Cascade.MERGE], nullable: true, unique: 'test2_book_uuid_pk_unique' })\n book?: Book2;\n \n @Property()\n@@ -733,7 +755,7 @@ export class Test2 {\n \n exports[`EntityGenerator generate entities from schema [sqlite]: sqlite-entity-dump 1`] = `\n Array [\n- \"import { Entity, ManyToOne, PrimaryKey, Property } from 'mikro-orm';\n+ \"import { Entity, ManyToOne, PrimaryKey, Property, Unique } from 'mikro-orm';\n import { Book3 } from './Book3';\n \n @Entity()\n@@ -751,6 +773,7 @@ export class Author3 {\n @Property()\n name!: string;\n \n+ @Unique({ name: 'author3_email_unique' })\n @Property()\n email!: string;\n \n@@ -769,7 +792,7 @@ export class Author3 {\n @Property({ columnType: 'time', nullable: true })\n bornTime?: string;\n \n- @ManyToOne({ entity: () => Book3, nullable: true })\n+ @ManyToOne({ entity: () => Book3, nullable: true, index: 'author3_favourite_book_id_index' })\n favouriteBook?: Book3;\n \n }\n@@ -796,10 +819,10 @@ export class Book3 {\n @Property({ nullable: true })\n foo?: string;\n \n- @ManyToOne({ entity: () => Author3, nullable: true })\n+ @ManyToOne({ entity: () => Author3, nullable: true, index: 'book3_author_id_index' })\n author?: Author3;\n \n- @ManyToOne({ entity: () => Publisher3, nullable: true })\n+ @ManyToOne({ entity: () => Publisher3, nullable: true, index: 'book3_publisher_id_index' })\n publisher?: Publisher3;\n \n }\n@@ -814,10 +837,10 @@ 
export class Book3ToBookTag3 {\n @PrimaryKey()\n id!: number;\n \n- @ManyToOne({ entity: () => Book3, cascade: [Cascade.ALL], nullable: true })\n+ @ManyToOne({ entity: () => Book3, cascade: [Cascade.ALL], nullable: true, index: 'book3_to_book_tag3_book3_id_index' })\n book3?: Book3;\n \n- @ManyToOne({ entity: () => BookTag3, cascade: [Cascade.ALL], nullable: true })\n+ @ManyToOne({ entity: () => BookTag3, cascade: [Cascade.ALL], nullable: true, index: 'book3_to_book_tag3_book_tag3_id_index' })\n bookTag3?: BookTag3;\n \n }\n@@ -864,10 +887,10 @@ export class Publisher3ToTest3 {\n @PrimaryKey()\n id!: number;\n \n- @ManyToOne({ entity: () => Publisher3, cascade: [Cascade.ALL], nullable: true })\n+ @ManyToOne({ entity: () => Publisher3, cascade: [Cascade.ALL], nullable: true, index: 'publisher3_to_test3_publisher3_id_index' })\n publisher3?: Publisher3;\n \n- @ManyToOne({ entity: () => Test3, cascade: [Cascade.ALL], nullable: true })\n+ @ManyToOne({ entity: () => Test3, cascade: [Cascade.ALL], nullable: true, index: 'publisher3_to_test3_test3_id_index' })\n test3?: Test3;\n \n }\n", "SchemaGenerator.test.ts.snap": "@@ -5,13 +5,13 @@ exports[`SchemaGenerator generate schema from metadata [mysql]: mysql-create-sch\n set foreign_key_checks = 0;\n \n create table \\`author2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`created_at\\` datetime(3) not null default current_timestamp(3), \\`updated_at\\` datetime(3) not null default current_timestamp(3), \\`name\\` varchar(255) not null, \\`email\\` varchar(255) not null, \\`age\\` int(11) null, \\`terms_accepted\\` tinyint(1) not null default false, \\`optional\\` tinyint(1) null, \\`identities\\` json null, \\`born\\` date null, \\`born_time\\` time null, \\`favourite_book_uuid_pk\\` varchar(36) null, \\`favourite_author_id\\` int(11) unsigned null) default character set utf8 engine = InnoDB;\n+alter table \\`author2\\` add index \\`custom_email_index_name\\`(\\`email\\`);\n alter table \\`author2\\` add unique \\`custom_email_unique_name\\`(\\`email\\`);\n+alter table \\`author2\\` add index \\`author2_terms_accepted_index\\`(\\`terms_accepted\\`);\n alter table \\`author2\\` add index \\`author2_born_index\\`(\\`born\\`);\n alter table \\`author2\\` add index \\`born_time_idx\\`(\\`born_time\\`);\n alter table \\`author2\\` add index \\`author2_favourite_book_uuid_pk_index\\`(\\`favourite_book_uuid_pk\\`);\n alter table \\`author2\\` add index \\`author2_favourite_author_id_index\\`(\\`favourite_author_id\\`);\n-alter table \\`author2\\` add index \\`custom_email_index_name\\`(\\`email\\`);\n-alter table \\`author2\\` add index \\`author2_terms_accepted_index\\`(\\`terms_accepted\\`);\n alter table \\`author2\\` add index \\`custom_idx_name_123\\`(\\`name\\`);\n alter table \\`author2\\` add index \\`author2_name_age_index\\`(\\`name\\`, \\`age\\`);\n alter table \\`author2\\` add unique \\`author2_name_email_unique\\`(\\`name\\`, \\`email\\`);\n@@ -58,8 +58,7 @@ alter table \\`car2\\` add index \\`car2_year_index\\`(\\`year\\`);\n alter table \\`car2\\` add primary key \\`car2_pkey\\`(\\`name\\`, \\`year\\`);\n \n create table \\`car_owner2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`name\\` varchar(255) not null, \\`car_name\\` varchar(100) not null, \\`car_year\\` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table \\`car_owner2\\` add index \\`car_owner2_car_name_index\\`(\\`car_name\\`);\n-alter table \\`car_owner2\\` add index 
\\`car_owner2_car_year_index\\`(\\`car_year\\`);\n+alter table \\`car_owner2\\` add index \\`car_owner2_car_name_car_year_index\\`(\\`car_name\\`, \\`car_year\\`);\n \n create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null) default character set utf8 engine = InnoDB;\n alter table \\`user2\\` add index \\`user2_first_name_index\\`(\\`first_name\\`);\n@@ -90,11 +89,9 @@ alter table \\`publisher2_to_test2\\` add index \\`publisher2_to_test2_publisher2_i\n alter table \\`publisher2_to_test2\\` add index \\`publisher2_to_test2_test2_id_index\\`(\\`test2_id\\`);\n \n create table \\`user2_to_car2\\` (\\`user2_first_name\\` varchar(100) not null, \\`user2_last_name\\` varchar(100) not null, \\`car2_name\\` varchar(100) not null, \\`car2_year\\` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_first_name_index\\`(\\`user2_first_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_last_name_index\\`(\\`user2_last_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_name_index\\`(\\`car2_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_year_index\\`(\\`car2_year\\`);\n alter table \\`user2_to_car2\\` add primary key \\`user2_to_car2_pkey\\`(\\`user2_first_name\\`, \\`user2_last_name\\`, \\`car2_name\\`, \\`car2_year\\`);\n+alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_first_name_user2_last_name_index\\`(\\`user2_first_name\\`, \\`user2_last_name\\`);\n+alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_name_car2_year_index\\`(\\`car2_name\\`, \\`car2_year\\`);\n \n alter table \\`author2\\` add constraint \\`author2_favourite_book_uuid_pk_foreign\\` foreign key (\\`favourite_book_uuid_pk\\`) references \\`book2\\` (\\`uuid_pk\\`) on update no action on delete cascade;\n alter table \\`author2\\` add constraint \\`author2_favourite_author_id_foreign\\` foreign key (\\`favourite_author_id\\`) references \\`author2\\` (\\`id\\`) on update cascade on delete set null;\n@@ -194,13 +191,13 @@ drop table if exists \\`publisher2_to_test2\\`;\n drop table if exists \\`user2_to_car2\\`;\n \n create table \\`author2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`created_at\\` datetime(3) not null default current_timestamp(3), \\`updated_at\\` datetime(3) not null default current_timestamp(3), \\`name\\` varchar(255) not null, \\`email\\` varchar(255) not null, \\`age\\` int(11) null, \\`terms_accepted\\` tinyint(1) not null default false, \\`optional\\` tinyint(1) null, \\`identities\\` json null, \\`born\\` date null, \\`born_time\\` time null, \\`favourite_book_uuid_pk\\` varchar(36) null, \\`favourite_author_id\\` int(11) unsigned null) default character set utf8 engine = InnoDB;\n+alter table \\`author2\\` add index \\`custom_email_index_name\\`(\\`email\\`);\n alter table \\`author2\\` add unique \\`custom_email_unique_name\\`(\\`email\\`);\n+alter table \\`author2\\` add index \\`author2_terms_accepted_index\\`(\\`terms_accepted\\`);\n alter table \\`author2\\` add index \\`author2_born_index\\`(\\`born\\`);\n alter table \\`author2\\` add index \\`born_time_idx\\`(\\`born_time\\`);\n alter table \\`author2\\` add index \\`author2_favourite_book_uuid_pk_index\\`(\\`favourite_book_uuid_pk\\`);\n alter table \\`author2\\` add index \\`author2_favourite_author_id_index\\`(\\`favourite_author_id\\`);\n-alter table 
\\`author2\\` add index \\`custom_email_index_name\\`(\\`email\\`);\n-alter table \\`author2\\` add index \\`author2_terms_accepted_index\\`(\\`terms_accepted\\`);\n alter table \\`author2\\` add index \\`custom_idx_name_123\\`(\\`name\\`);\n alter table \\`author2\\` add index \\`author2_name_age_index\\`(\\`name\\`, \\`age\\`);\n alter table \\`author2\\` add unique \\`author2_name_email_unique\\`(\\`name\\`, \\`email\\`);\n@@ -247,8 +244,7 @@ alter table \\`car2\\` add index \\`car2_year_index\\`(\\`year\\`);\n alter table \\`car2\\` add primary key \\`car2_pkey\\`(\\`name\\`, \\`year\\`);\n \n create table \\`car_owner2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`name\\` varchar(255) not null, \\`car_name\\` varchar(100) not null, \\`car_year\\` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table \\`car_owner2\\` add index \\`car_owner2_car_name_index\\`(\\`car_name\\`);\n-alter table \\`car_owner2\\` add index \\`car_owner2_car_year_index\\`(\\`car_year\\`);\n+alter table \\`car_owner2\\` add index \\`car_owner2_car_name_car_year_index\\`(\\`car_name\\`, \\`car_year\\`);\n \n create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null) default character set utf8 engine = InnoDB;\n alter table \\`user2\\` add index \\`user2_first_name_index\\`(\\`first_name\\`);\n@@ -279,11 +275,9 @@ alter table \\`publisher2_to_test2\\` add index \\`publisher2_to_test2_publisher2_i\n alter table \\`publisher2_to_test2\\` add index \\`publisher2_to_test2_test2_id_index\\`(\\`test2_id\\`);\n \n create table \\`user2_to_car2\\` (\\`user2_first_name\\` varchar(100) not null, \\`user2_last_name\\` varchar(100) not null, \\`car2_name\\` varchar(100) not null, \\`car2_year\\` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_first_name_index\\`(\\`user2_first_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_last_name_index\\`(\\`user2_last_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_name_index\\`(\\`car2_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_year_index\\`(\\`car2_year\\`);\n alter table \\`user2_to_car2\\` add primary key \\`user2_to_car2_pkey\\`(\\`user2_first_name\\`, \\`user2_last_name\\`, \\`car2_name\\`, \\`car2_year\\`);\n+alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_first_name_user2_last_name_index\\`(\\`user2_first_name\\`, \\`user2_last_name\\`);\n+alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_name_car2_year_index\\`(\\`car2_name\\`, \\`car2_year\\`);\n \n alter table \\`author2\\` add constraint \\`author2_favourite_book_uuid_pk_foreign\\` foreign key (\\`favourite_book_uuid_pk\\`) references \\`book2\\` (\\`uuid_pk\\`) on update no action on delete cascade;\n alter table \\`author2\\` add constraint \\`author2_favourite_author_id_foreign\\` foreign key (\\`favourite_author_id\\`) references \\`author2\\` (\\`id\\`) on update cascade on delete set null;\n@@ -351,11 +345,11 @@ exports[`SchemaGenerator generate schema from metadata [postgres]: postgres-crea\n set session_replication_role = 'replica';\n \n create table \\\\\"author2\\\\\" (\\\\\"id\\\\\" serial primary key, \\\\\"created_at\\\\\" timestamptz(3) not null default current_timestamp(3), \\\\\"updated_at\\\\\" timestamptz(3) not null default current_timestamp(3), \\\\\"name\\\\\" varchar(255) not 
null, \\\\\"email\\\\\" varchar(255) not null, \\\\\"age\\\\\" int4 null, \\\\\"terms_accepted\\\\\" bool not null default false, \\\\\"optional\\\\\" bool null, \\\\\"identities\\\\\" json null, \\\\\"born\\\\\" date null, \\\\\"born_time\\\\\" time(0) null, \\\\\"favourite_book_uuid_pk\\\\\" varchar(36) null, \\\\\"favourite_author_id\\\\\" int4 null);\n+create index \\\\\"custom_email_index_name\\\\\" on \\\\\"author2\\\\\" (\\\\\"email\\\\\");\n alter table \\\\\"author2\\\\\" add constraint \\\\\"custom_email_unique_name\\\\\" unique (\\\\\"email\\\\\");\n+create index \\\\\"author2_terms_accepted_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"terms_accepted\\\\\");\n create index \\\\\"author2_born_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"born\\\\\");\n create index \\\\\"born_time_idx\\\\\" on \\\\\"author2\\\\\" (\\\\\"born_time\\\\\");\n-create index \\\\\"custom_email_index_name\\\\\" on \\\\\"author2\\\\\" (\\\\\"email\\\\\");\n-create index \\\\\"author2_terms_accepted_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"terms_accepted\\\\\");\n create index \\\\\"custom_idx_name_123\\\\\" on \\\\\"author2\\\\\" (\\\\\"name\\\\\");\n create index \\\\\"author2_name_age_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"name\\\\\", \\\\\"age\\\\\");\n alter table \\\\\"author2\\\\\" add constraint \\\\\"author2_name_email_unique\\\\\" unique (\\\\\"name\\\\\", \\\\\"email\\\\\");\n@@ -490,11 +484,11 @@ drop table if exists \\\\\"book_to_tag_unordered\\\\\" cascade;\n drop table if exists \\\\\"publisher2_to_test2\\\\\" cascade;\n \n create table \\\\\"author2\\\\\" (\\\\\"id\\\\\" serial primary key, \\\\\"created_at\\\\\" timestamptz(3) not null default current_timestamp(3), \\\\\"updated_at\\\\\" timestamptz(3) not null default current_timestamp(3), \\\\\"name\\\\\" varchar(255) not null, \\\\\"email\\\\\" varchar(255) not null, \\\\\"age\\\\\" int4 null, \\\\\"terms_accepted\\\\\" bool not null default false, \\\\\"optional\\\\\" bool null, \\\\\"identities\\\\\" json null, \\\\\"born\\\\\" date null, \\\\\"born_time\\\\\" time(0) null, \\\\\"favourite_book_uuid_pk\\\\\" varchar(36) null, \\\\\"favourite_author_id\\\\\" int4 null);\n+create index \\\\\"custom_email_index_name\\\\\" on \\\\\"author2\\\\\" (\\\\\"email\\\\\");\n alter table \\\\\"author2\\\\\" add constraint \\\\\"custom_email_unique_name\\\\\" unique (\\\\\"email\\\\\");\n+create index \\\\\"author2_terms_accepted_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"terms_accepted\\\\\");\n create index \\\\\"author2_born_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"born\\\\\");\n create index \\\\\"born_time_idx\\\\\" on \\\\\"author2\\\\\" (\\\\\"born_time\\\\\");\n-create index \\\\\"custom_email_index_name\\\\\" on \\\\\"author2\\\\\" (\\\\\"email\\\\\");\n-create index \\\\\"author2_terms_accepted_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"terms_accepted\\\\\");\n create index \\\\\"custom_idx_name_123\\\\\" on \\\\\"author2\\\\\" (\\\\\"name\\\\\");\n create index \\\\\"author2_name_age_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"name\\\\\", \\\\\"age\\\\\");\n alter table \\\\\"author2\\\\\" add constraint \\\\\"author2_name_email_unique\\\\\" unique (\\\\\"name\\\\\", \\\\\"email\\\\\");\n@@ -860,13 +854,13 @@ exports[`SchemaGenerator update empty schema from metadata [mysql]: mysql-update\n set foreign_key_checks = 0;\n \n create table \\`author2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`created_at\\` datetime(3) not null default current_timestamp(3), \\`updated_at\\` datetime(3) not null default current_timestamp(3), 
\\`name\\` varchar(255) not null, \\`email\\` varchar(255) not null, \\`age\\` int(11) null, \\`terms_accepted\\` tinyint(1) not null default false, \\`optional\\` tinyint(1) null, \\`identities\\` json null, \\`born\\` date null, \\`born_time\\` time null, \\`favourite_book_uuid_pk\\` varchar(36) null, \\`favourite_author_id\\` int(11) unsigned null) default character set utf8 engine = InnoDB;\n+alter table \\`author2\\` add index \\`custom_email_index_name\\`(\\`email\\`);\n alter table \\`author2\\` add unique \\`custom_email_unique_name\\`(\\`email\\`);\n+alter table \\`author2\\` add index \\`author2_terms_accepted_index\\`(\\`terms_accepted\\`);\n alter table \\`author2\\` add index \\`author2_born_index\\`(\\`born\\`);\n alter table \\`author2\\` add index \\`born_time_idx\\`(\\`born_time\\`);\n alter table \\`author2\\` add index \\`author2_favourite_book_uuid_pk_index\\`(\\`favourite_book_uuid_pk\\`);\n alter table \\`author2\\` add index \\`author2_favourite_author_id_index\\`(\\`favourite_author_id\\`);\n-alter table \\`author2\\` add index \\`custom_email_index_name\\`(\\`email\\`);\n-alter table \\`author2\\` add index \\`author2_terms_accepted_index\\`(\\`terms_accepted\\`);\n alter table \\`author2\\` add index \\`custom_idx_name_123\\`(\\`name\\`);\n alter table \\`author2\\` add index \\`author2_name_age_index\\`(\\`name\\`, \\`age\\`);\n alter table \\`author2\\` add unique \\`author2_name_email_unique\\`(\\`name\\`, \\`email\\`);\n@@ -913,8 +907,7 @@ alter table \\`car2\\` add index \\`car2_year_index\\`(\\`year\\`);\n alter table \\`car2\\` add primary key \\`car2_pkey\\`(\\`name\\`, \\`year\\`);\n \n create table \\`car_owner2\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`name\\` varchar(255) not null, \\`car_name\\` varchar(100) not null, \\`car_year\\` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table \\`car_owner2\\` add index \\`car_owner2_car_name_index\\`(\\`car_name\\`);\n-alter table \\`car_owner2\\` add index \\`car_owner2_car_year_index\\`(\\`car_year\\`);\n+alter table \\`car_owner2\\` add index \\`car_owner2_car_name_car_year_index\\`(\\`car_name\\`, \\`car_year\\`);\n \n create table \\`user2\\` (\\`first_name\\` varchar(100) not null, \\`last_name\\` varchar(100) not null, \\`foo\\` int(11) null) default character set utf8 engine = InnoDB;\n alter table \\`user2\\` add index \\`user2_first_name_index\\`(\\`first_name\\`);\n@@ -945,11 +938,9 @@ alter table \\`publisher2_to_test2\\` add index \\`publisher2_to_test2_publisher2_i\n alter table \\`publisher2_to_test2\\` add index \\`publisher2_to_test2_test2_id_index\\`(\\`test2_id\\`);\n \n create table \\`user2_to_car2\\` (\\`user2_first_name\\` varchar(100) not null, \\`user2_last_name\\` varchar(100) not null, \\`car2_name\\` varchar(100) not null, \\`car2_year\\` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_first_name_index\\`(\\`user2_first_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_user2_last_name_index\\`(\\`user2_last_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_name_index\\`(\\`car2_name\\`);\n-alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_year_index\\`(\\`car2_year\\`);\n alter table \\`user2_to_car2\\` add primary key \\`user2_to_car2_pkey\\`(\\`user2_first_name\\`, \\`user2_last_name\\`, \\`car2_name\\`, \\`car2_year\\`);\n+alter table \\`user2_to_car2\\` add index 
\\`user2_to_car2_user2_first_name_user2_last_name_index\\`(\\`user2_first_name\\`, \\`user2_last_name\\`);\n+alter table \\`user2_to_car2\\` add index \\`user2_to_car2_car2_name_car2_year_index\\`(\\`car2_name\\`, \\`car2_year\\`);\n \n alter table \\`author2\\` add constraint \\`author2_favourite_book_uuid_pk_foreign\\` foreign key (\\`favourite_book_uuid_pk\\`) references \\`book2\\` (\\`uuid_pk\\`) on update no action on delete cascade;\n alter table \\`author2\\` add constraint \\`author2_favourite_author_id_foreign\\` foreign key (\\`favourite_author_id\\`) references \\`author2\\` (\\`id\\`) on update cascade on delete set null;\n@@ -1001,11 +992,11 @@ exports[`SchemaGenerator update empty schema from metadata [postgres]: postgres-\n set session_replication_role = 'replica';\n \n create table \\\\\"author2\\\\\" (\\\\\"id\\\\\" serial primary key, \\\\\"created_at\\\\\" timestamptz(3) not null default current_timestamp(3), \\\\\"updated_at\\\\\" timestamptz(3) not null default current_timestamp(3), \\\\\"name\\\\\" varchar(255) not null, \\\\\"email\\\\\" varchar(255) not null, \\\\\"age\\\\\" int4 null, \\\\\"terms_accepted\\\\\" bool not null default false, \\\\\"optional\\\\\" bool null, \\\\\"identities\\\\\" json null, \\\\\"born\\\\\" date null, \\\\\"born_time\\\\\" time(0) null, \\\\\"favourite_book_uuid_pk\\\\\" varchar(36) null, \\\\\"favourite_author_id\\\\\" int4 null);\n+create index \\\\\"custom_email_index_name\\\\\" on \\\\\"author2\\\\\" (\\\\\"email\\\\\");\n alter table \\\\\"author2\\\\\" add constraint \\\\\"custom_email_unique_name\\\\\" unique (\\\\\"email\\\\\");\n+create index \\\\\"author2_terms_accepted_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"terms_accepted\\\\\");\n create index \\\\\"author2_born_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"born\\\\\");\n create index \\\\\"born_time_idx\\\\\" on \\\\\"author2\\\\\" (\\\\\"born_time\\\\\");\n-create index \\\\\"custom_email_index_name\\\\\" on \\\\\"author2\\\\\" (\\\\\"email\\\\\");\n-create index \\\\\"author2_terms_accepted_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"terms_accepted\\\\\");\n create index \\\\\"custom_idx_name_123\\\\\" on \\\\\"author2\\\\\" (\\\\\"name\\\\\");\n create index \\\\\"author2_name_age_index\\\\\" on \\\\\"author2\\\\\" (\\\\\"name\\\\\", \\\\\"age\\\\\");\n alter table \\\\\"author2\\\\\" add constraint \\\\\"author2_name_email_unique\\\\\" unique (\\\\\"name\\\\\", \\\\\"email\\\\\");\n", "composite-keys.mysql.test.ts": "@@ -96,7 +96,7 @@ describe('composite keys in mysql', () => {\n orm.em.clear();\n \n const o1 = await orm.em.findOneOrFail(CarOwner2, owner.id, ['car']);\n- expect(o1.car!.price).toBe(200_000);\n+ expect(o1.car.price).toBe(200_000);\n expect(wrap(o1).toJSON()).toEqual({\n id: 1,\n name: 'John Doe',\n@@ -107,7 +107,7 @@ describe('composite keys in mysql', () => {\n },\n });\n \n- o1.car!.price = 150_000;\n+ o1.car.price = 150_000;\n await orm.em.flush();\n orm.em.clear();\n \n@@ -118,9 +118,9 @@ describe('composite keys in mysql', () => {\n car: { name: 'Audi A8', year: 2010 },\n });\n expect(wrap(o2.car).isInitialized()).toBe(false);\n- expect(o2.car!.price).toBeUndefined();\n+ expect(o2.car.price).toBeUndefined();\n await wrap(o2.car).init();\n- expect(o2.car!.price).toBe(150_000);\n+ expect(o2.car.price).toBe(150_000);\n \n const c1 = await orm.em.findOneOrFail(Car2, { name: car.name, year: car.year });\n expect(c1).toBe(o2.car);\n", "Address2.ts": "@@ -1,10 +1,10 @@\n-import { Entity, Property, OneToOne } from '../../lib';\n+import { Entity, Property, 
OneToOne, Index } from '../../lib';\n import { Author2 } from './Author2';\n \n @Entity()\n export class Address2 {\n \n- @OneToOne({ entity: () => Author2, primary: true, joinColumn: 'author_id' })\n+ @OneToOne({ entity: () => Author2, primary: true, joinColumn: 'author_id', unique: 'address2_author_id_unique' })\n author: Author2;\n \n @Property()\n", "Car2.ts": "@@ -1,12 +1,14 @@\n-import { Collection, Entity, ManyToMany, PrimaryKey, PrimaryKeyType, Property } from '../../lib';\n+import { Collection, Entity, Index, ManyToMany, PrimaryKey, PrimaryKeyType, Property } from '../../lib';\n import { User2 } from './User2';\n \n @Entity()\n export class Car2 {\n \n+ @Index({ name: 'car2_name_index' })\n @PrimaryKey({ length: 100 })\n name: string;\n \n+ @Index({ name: 'car2_year_index' })\n @PrimaryKey()\n year: number;\n \n", "CarOwner2.ts": "@@ -10,8 +10,8 @@ export class CarOwner2 {\n @Property()\n name: string;\n \n- @ManyToOne(() => Car2)\n- car?: Car2;\n+ @ManyToOne(() => Car2, { index: 'car_owner2_car_name_car_year_idx' })\n+ car!: Car2;\n \n constructor(name: string) {\n this.name = name;\n", "mysql-schema.sql": "@@ -77,8 +77,7 @@ alter table `car2` add index `car2_year_index`(`year`);\n alter table `car2` add primary key `car2_pkey`(`name`, `year`);\n \n create table `car_owner2` (`id` int unsigned not null auto_increment primary key, `name` varchar(255) not null, `car_name` varchar(255) not null, `car_year` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table `car_owner2` add index `car_owner2_car_name_index`(`car_name`);\n-alter table `car_owner2` add index `car_owner2_car_year_index`(`car_year`);\n+alter table `car_owner2` add index `car_owner2_car_name_car_year_idx`(`car_name`, `car_year`);\n \n create table `user2` (`first_name` varchar(100) not null, `last_name` varchar(100) not null, `foo` int(11) null) default character set utf8 engine = InnoDB;\n alter table `user2` add index `user2_first_name_index`(`first_name`);\n@@ -109,11 +108,9 @@ alter table `publisher2_to_test2` add index `publisher2_to_test2_publisher2_id_i\n alter table `publisher2_to_test2` add index `publisher2_to_test2_test2_id_index`(`test2_id`);\n \n create table `user2_to_car2` (`user2_first_name` varchar(100) not null, `user2_last_name` varchar(100) not null, `car2_name` varchar(100) not null, `car2_year` int(11) unsigned not null) default character set utf8 engine = InnoDB;\n-alter table `user2_to_car2` add index `user2_to_car2_user2_first_name_index`(`user2_first_name`);\n-alter table `user2_to_car2` add index `user2_to_car2_user2_last_name_index`(`user2_last_name`);\n-alter table `user2_to_car2` add index `user2_to_car2_car2_name_index`(`car2_name`);\n-alter table `user2_to_car2` add index `user2_to_car2_car2_year_index`(`car2_year`);\n alter table `user2_to_car2` add primary key `user2_to_car2_pkey`(`user2_first_name`, `user2_last_name`, `car2_name`, `car2_year`);\n+alter table `user2_to_car2` add index `user2_to_car2_user2_first_name_user2_last_name_index`(`user2_first_name`, `user2_last_name`);\n+alter table `user2_to_car2` add index `user2_to_car2_car2_name_car2_year_index`(`car2_name`, `car2_year`);\n \n alter table `author2` add constraint `author2_favourite_book_uuid_pk_foreign` foreign key (`favourite_book_uuid_pk`) references `book2` (`uuid_pk`) on update no action on delete cascade;\n alter table `author2` add constraint `author2_favourite_author_id_foreign` foreign key (`favourite_author_id`) references `author2` (`id`) on update cascade on delete set null;\n", 
"postgre-schema.sql": "@@ -17,6 +17,7 @@ drop table if exists \"book2_to_book_tag2\" cascade;\n drop table if exists \"book_to_tag_unordered\" cascade;\n drop table if exists \"publisher2_to_test2\" cascade;\n drop table if exists \"label2\" cascade;\n+drop table if exists \"new_table\" cascade;\n \n create table \"author2\" (\"id\" serial primary key, \"created_at\" timestamptz(3) not null default current_timestamp(3), \"updated_at\" timestamptz(3) not null default current_timestamp(3), \"name\" varchar(255) not null, \"email\" varchar(255) not null, \"age\" int4 null, \"terms_accepted\" bool not null default false, \"optional\" bool null, \"identities\" json null, \"born\" date null, \"born_time\" time(0) null, \"favourite_book_uuid_pk\" varchar(36) null, \"favourite_author_id\" int4 null);\n alter table \"author2\" add constraint \"custom_email_unique_name\" unique (\"email\");\n"}
docs(security): add a security policy Add a security policy.
33e9f2623b5a0802669d1ccef37c82a7fab7e1e3
docs
https://github.com/ibis-project/ibis/commit/33e9f2623b5a0802669d1ccef37c82a7fab7e1e3
add a security policy Add a security policy.
{"SECURITY.md": "@@ -0,0 +1,11 @@\n+# Security Policy\n+\n+## Supported Versions\n+\n+Security updates are provided by releasing a new version of Ibis.\n+\n+## Reporting a Vulnerability\n+\n+- Send security reports to [email protected]\n+- Vulnerability reports are published on GitHub at https://github.com/ibis-project/ibis/security/advisories\n+- If a vulnerability is accepted we will attempt to address it as soon as possible, by cutting a new release.\n"}
chore: name the spawned thread
8bb91cfe111fe6301e356a3ba762025ad4275c9a
chore
https://github.com/erg-lang/erg/commit/8bb91cfe111fe6301e356a3ba762025ad4275c9a
name the spawned thread
{"spawn.rs": "@@ -15,7 +15,7 @@ macro_rules! enable_overflow_stacktrace {\n /// Execute a function in a new thread on Windows, otherwise just run it.\n ///\n /// Windows has a smaller default stack size than other OSs, which may cause a stack overflow, especially in the parsing process.\n-pub fn exec_new_thread<F, T>(run: F) -> T\n+pub fn exec_new_thread<F, T>(run: F, name: &str) -> T\n where\n F: FnOnce() -> T + Send + 'static,\n T: Send + 'static,\n@@ -24,6 +24,7 @@ where\n if cfg!(windows) || cfg!(feature = \"large_thread\") {\n const STACK_SIZE: usize = 4 * 1024 * 1024;\n let child = thread::Builder::new()\n+ .name(name.to_string())\n .stack_size(STACK_SIZE)\n .spawn(run)\n .unwrap();\n", "main.rs": "@@ -49,5 +49,5 @@ fn run() {\n }\n \n fn main() {\n- exec_new_thread(run);\n+ exec_new_thread(run, \"erg\");\n }\n", "parse_test.rs": "@@ -93,7 +93,7 @@ fn _parse_test_from_code(file_path: &'static str) -> Result<(), ParserRunnerErro\n }\n \n fn parse_test_from_code(file_path: &'static str) -> Result<(), ParserRunnerErrors> {\n- exec_new_thread(move || _parse_test_from_code(file_path))\n+ exec_new_thread(move || _parse_test_from_code(file_path), file_path)\n }\n \n fn expect_success(file_path: &'static str) -> Result<(), ()> {\n", "common.rs": "@@ -149,12 +149,12 @@ pub fn _exec_repl(name: &'static str, lines: Vec<String>) -> Result<ExitStatus,\n }\n \n pub(crate) fn exec_file(file_path: &'static str) -> Result<i32, CompileErrors> {\n- exec_new_thread(move || _exec_file(file_path))\n+ exec_new_thread(move || _exec_file(file_path), file_path)\n }\n \n pub(crate) fn exec_repl(\n name: &'static str,\n lines: Vec<String>,\n ) -> Result<ExitStatus, CompileErrors> {\n- exec_new_thread(move || _exec_repl(name, lines))\n+ exec_new_thread(move || _exec_repl(name, lines), name)\n }\n"}
fix: cancel the current `runAsync` call when Controller#stop is called with no arguments
0054ffe9a6c9f1c3c616e23d2cf38467fcf69cd0
fix
https://github.com/pmndrs/react-spring/commit/0054ffe9a6c9f1c3c616e23d2cf38467fcf69cd0
cancel the current `runAsync` call when Controller#stop is called with no arguments
{"Controller.ts": "@@ -136,6 +136,7 @@ export class Controller<State extends Lookup = Lookup>\n stop(keys?: OneOrMore<string>) {\n if (is.und(keys)) {\n this.each(spring => spring.stop())\n+ cancelAsync(this._state, this._lastAsyncId)\n } else {\n const springs = this.springs as Lookup<SpringValue>\n each(toArray(keys), key => springs[key].stop())\n"}
chore(error-reporting): add more thorough information about slicing a non-sliceable column
7cd583551aab9da1d82a9d2dcfaa9d0add67f4a7
chore
https://github.com/ibis-project/ibis/commit/7cd583551aab9da1d82a9d2dcfaa9d0add67f4a7
add more thorough information about slicing a non-sliceable column
{"arrays.py": "@@ -789,7 +789,8 @@ class ArrayScalar(Scalar, ArrayValue):\n \n @public\n class ArrayColumn(Column, ArrayValue):\n- pass\n+ def __getitem__(self, index: int | ir.IntegerValue | slice) -> ir.Column:\n+ return ArrayValue.__getitem__(self, index)\n \n \n @public\n", "generic.py": "@@ -986,6 +986,12 @@ class Column(Value, _FixedTextJupyterMixin):\n \n __array_ufunc__ = None\n \n+ def __getitem__(self, _):\n+ raise TypeError(\n+ f\"{self.__class__.__name__!r} is not subscriptable: \"\n+ \"see https://ibis-project.org/tutorial/ibis-for-pandas-users/#ibis-for-pandas-users for details.\"\n+ )\n+\n def __array__(self, dtype=None):\n return self.execute().__array__(dtype)\n \n", "json.py": "@@ -132,4 +132,7 @@ class JSONScalar(Scalar, JSONValue):\n \n @public\n class JSONColumn(Column, JSONValue):\n- pass\n+ def __getitem__(\n+ self, key: str | int | ir.StringValue | ir.IntegerValue\n+ ) -> JSONColumn:\n+ return JSONValue.__getitem__(self, key)\n", "maps.py": "@@ -388,7 +388,8 @@ class MapScalar(Scalar, MapValue):\n \n @public\n class MapColumn(Column, MapValue):\n- pass\n+ def __getitem__(self, key: ir.Value) -> ir.Column:\n+ return MapValue.__getitem__(self, key)\n \n \n @public\n", "strings.py": "@@ -1523,4 +1523,5 @@ class StringScalar(Scalar, StringValue):\n \n @public\n class StringColumn(Column, StringValue):\n- pass\n+ def __getitem__(self, key: slice | int | ir.IntegerScalar) -> StringColumn:\n+ return StringValue.__getitem__(self, key)\n", "structs.py": "@@ -396,4 +396,5 @@ class StructScalar(Scalar, StructValue):\n \n @public\n class StructColumn(Column, StructValue):\n- pass\n+ def __getitem__(self, name: str) -> ir.Column:\n+ return StructValue.__getitem__(self, name)\n", "test_operations.py": "@@ -278,3 +278,13 @@ def test_sortkey_propagates_dtype_and_shape():\n k = ops.SortKey(t.a, ascending=True)\n assert k.output_dtype == dt.int16\n assert k.output_shape == rlz.Shape.COLUMNAR\n+\n+\n+def test_getitem_on_column_is_error():\n+ t = ibis.table(dict(a=\"int\"))\n+\n+ with pytest.raises(TypeError, match=\"#ibis-for-pandas-users\"):\n+ t.a[0]\n+\n+ with pytest.raises(TypeError, match=\"#ibis-for-pandas-users\"):\n+ t.a[:1]\n"}
chore: bump version
ba622499bcee94f6c4aeeecdba2f4aa1f51c6e4c
chore
https://github.com/mikro-orm/mikro-orm/commit/ba622499bcee94f6c4aeeecdba2f4aa1f51c6e4c
bump version
{"lerna.json": "@@ -1,6 +1,6 @@\n {\n \"packages\": [\"packages/*\"],\n- \"version\": \"4.0.0-alpha.12\",\n+ \"version\": \"4.0.0-rc.0\",\n \"command\": {\n \"version\": {\n \"conventionalCommits\": true,\n", "package.json": "@@ -1,6 +1,6 @@\n {\n \"name\": \"@mikro-orm/sqlite\",\n- \"version\": \"4.0.0-alpha.12\",\n+ \"version\": \"4.0.0-rc.0\",\n \"description\": \"TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.\",\n \"main\": \"dist/index.js\",\n \"typings\": \"dist/index.d.ts\",\n@@ -47,14 +47,14 @@\n \"access\": \"public\"\n },\n \"dependencies\": {\n- \"@mikro-orm/knex\": \"^4.0.0-alpha.12\",\n+ \"@mikro-orm/knex\": \"^4.0.0-rc.0\",\n \"fs-extra\": \"^9.0.1\",\n \"sqlite3\": \"^4.2.0\"\n },\n \"devDependencies\": {\n- \"@mikro-orm/core\": \"^4.0.0-alpha.12\"\n+ \"@mikro-orm/core\": \"^4.0.0-rc.0\"\n },\n \"peerDependencies\": {\n- \"@mikro-orm/core\": \"^4.0.0-alpha.12\"\n+ \"@mikro-orm/core\": \"^4.0.0-rc.0\"\n }\n }\n"}
feat(trino): support `ntile`
2978d1adb729124b8f706e5f66b0d9e50aa50720
feat
https://github.com/rohankumardubey/ibis/commit/2978d1adb729124b8f706e5f66b0d9e50aa50720
support `ntile`
{"registry.py": "@@ -515,7 +515,6 @@ operation_registry.update(\n )\n \n _invalid_operations = {\n- ops.NTile,\n # ibis.expr.operations.reductions\n ops.MultiQuantile,\n ops.Quantile,\n"}
chore(deps): bump ruff to 0.3.0 (#8503)
ef39aabc97f75c6e22e18b5a9aa7488d45073356
chore
https://github.com/ibis-project/ibis/commit/ef39aabc97f75c6e22e18b5a9aa7488d45073356
bump ruff to 0.3.0 (#8503)
{"step1.py": "@@ -5,8 +5,7 @@ from ibis import _, udf\n \n \n @udf.scalar.builtin\n-def flatten(x: list[list[str]]) -> list[str]:\n- ...\n+def flatten(x: list[list[str]]) -> list[str]: ...\n \n \n expr = (\n", "step2.py": "@@ -5,8 +5,7 @@ from ibis import _, udf\n \n \n @udf.scalar.builtin\n-def flatten(x: list[list[str]]) -> list[str]:\n- ...\n+def flatten(x: list[list[str]]) -> list[str]: ...\n \n \n expr = (\n", "model.py": "@@ -2,6 +2,7 @@\n \n Adapted from https://gist.github.com/pdet/e8d38734232c08e6c15aba79b4eb8368#file-taxi_prediction_example-py.\n \"\"\"\n+\n from __future__ import annotations\n \n import pyarrow as pa\n", "__init__.py": "@@ -381,8 +381,7 @@ class Backend(SQLBackend):\n signature = list(map(split_name_type, signature))\n \n # dummy callable\n- def fake_func(*args, **kwargs):\n- ...\n+ def fake_func(*args, **kwargs): ...\n \n fake_func.__name__ = name\n fake_func.__signature__ = inspect.Signature(\n", "converter.py": "@@ -11,13 +11,9 @@ class PostgresPandasData(PandasData):\n \n return gpd.GeoSeries(shp.from_wkb(s.map(bytes, na_action=\"ignore\")))\n \n- convert_Point = (\n- convert_LineString\n- ) = (\n- convert_Polygon\n- ) = (\n- convert_MultiLineString\n- ) = convert_MultiPoint = convert_MultiPolygon = convert_GeoSpatial\n+ convert_Point = convert_LineString = convert_Polygon = convert_MultiLineString = (\n+ convert_MultiPoint\n+ ) = convert_MultiPolygon = convert_GeoSpatial\n \n @classmethod\n def convert_Binary(cls, s, dtype, pandas_type):\n", "test_udf_execute.py": "@@ -148,8 +148,7 @@ def test_builtin_scalar(con, value, expected):\n from ibis import udf\n \n @udf.scalar.builtin\n- def bit_count(x: bytes) -> int:\n- ...\n+ def bit_count(x: bytes) -> int: ...\n \n expr = bit_count(value)\n result = con.execute(expr)\n@@ -168,8 +167,7 @@ def test_builtin_agg(con, where, expected):\n from ibis import udf\n \n @udf.agg.builtin(name=\"array_concat_agg\")\n- def concat_agg(x, where: bool = True) -> dt.Array[str]:\n- ...\n+ def concat_agg(x, where: bool = True) -> dt.Array[str]: ...\n \n t = ibis.memtable({\"a\": [list(\"abc\"), list(\"def\")]})\n expr = concat_agg(t.a, **where)\n", "test_builtin.py": "@@ -6,8 +6,7 @@ to_sql = ibis.bigquery.compile\n \n \n @ibis.udf.scalar.builtin\n-def farm_fingerprint(value: bytes) -> int:\n- ...\n+def farm_fingerprint(value: bytes) -> int: ...\n \n \n @ibis.udf.scalar.builtin(schema=\"fn\", database=\"bqutil\")\n", "test_client.py": "@@ -262,18 +262,15 @@ def test_truncate_table(con, engine, temp_table):\n \n \n @udf.scalar.builtin(name=\"arrayJaccardIndex\")\n-def array_jaccard_index(a: dt.Array[dt.int64], b: dt.Array[dt.int64]) -> float:\n- ...\n+def array_jaccard_index(a: dt.Array[dt.int64], b: dt.Array[dt.int64]) -> float: ...\n \n \n @udf.scalar.builtin(name=\"arrayJaccardIndex\")\n-def array_jaccard_index_no_input_types(a, b) -> float:\n- ...\n+def array_jaccard_index_no_input_types(a, b) -> float: ...\n \n \n @udf.scalar.builtin\n-def arrayJaccardIndex(a: dt.Array[dt.int64], b: dt.Array[dt.int64]) -> float:\n- ...\n+def arrayJaccardIndex(a: dt.Array[dt.int64], b: dt.Array[dt.int64]) -> float: ...\n \n \n @pytest.mark.parametrize(\n@@ -292,13 +289,11 @@ def test_builtin_scalar_udf(con, func):\n \n \n @udf.agg.builtin\n-def entropy(a) -> float:\n- ...\n+def entropy(a) -> float: ...\n \n \n @udf.agg.builtin(name=\"sumKahan\")\n-def sum_kahan(a: float) -> float:\n- ...\n+def sum_kahan(a: float) -> float: ...\n \n \n @pytest.mark.parametrize(\"func\", [entropy, sum_kahan])\n", "test_functions.py": "@@ -473,8 +473,7 @@ def 
test_hash(alltypes, snapshot):\n \n def test_udf_in_array_map(alltypes):\n @udf.scalar.builtin(name=\"plus\")\n- def my_add(a: int, b: int) -> int:\n- ...\n+ def my_add(a: int, b: int) -> int: ...\n \n n = 5\n expr = (\n@@ -489,8 +488,7 @@ def test_udf_in_array_map(alltypes):\n \n def test_udf_in_array_filter(alltypes):\n @udf.scalar.builtin(name=\"equals\")\n- def my_eq(a: int, b: int) -> bool:\n- ...\n+ def my_eq(a: int, b: int) -> bool: ...\n \n expr = alltypes.int_col.collect().filter(lambda x: my_eq(x, 1))\n result = expr.execute()\n", "convert.py": "@@ -40,9 +40,9 @@ class PandasConverter(DataMapper):\n \n convert_SignedInteger = convert_UnsignedInteger = convert_Integer\n convert_Int64 = convert_Int32 = convert_Int16 = convert_Int8 = convert_SignedInteger\n- convert_UInt64 = (\n- convert_UInt32\n- ) = convert_UInt16 = convert_UInt8 = convert_UnsignedInteger\n+ convert_UInt64 = convert_UInt32 = convert_UInt16 = convert_UInt8 = (\n+ convert_UnsignedInteger\n+ )\n \n @classmethod\n def convert_Floating(cls, s, dtype, pandas_type):\n", "compiler.py": "@@ -898,9 +898,9 @@ class SQLGlotCompiler(abc.ABC):\n funcname = f\"{funcs[type(op)]}_{hows[how]}\"\n return self.agg[funcname](*args, where=where)\n \n- visit_Variance = (\n- visit_StandardDev\n- ) = visit_Covariance = visit_VarianceStandardDevCovariance\n+ visit_Variance = visit_StandardDev = visit_Covariance = (\n+ visit_VarianceStandardDevCovariance\n+ )\n \n def visit_Arbitrary(self, op, *, arg, how, where):\n if how == \"heavy\":\n@@ -1369,11 +1369,9 @@ class SQLGlotCompiler(abc.ABC):\n def visit_Subtract(self, op, *, left, right):\n return sge.Sub(this=left, expression=right)\n \n- visit_DateSub = (\n- visit_DateDiff\n- ) = (\n- visit_TimestampSub\n- ) = visit_TimestampDiff = visit_IntervalSubtract = visit_Subtract\n+ visit_DateSub = visit_DateDiff = visit_TimestampSub = visit_TimestampDiff = (\n+ visit_IntervalSubtract\n+ ) = visit_Subtract\n \n @parenthesize_inputs\n def visit_Multiply(self, op, *, left, right):\n", "test_udf.py": "@@ -95,8 +95,7 @@ def test_vectorized_udf_operations(table, klass, output_type):\n )\n def test_udf_from_annotations(dec, table):\n @dec\n- def myfunc(x: int, y: str) -> float:\n- ...\n+ def myfunc(x: int, y: str) -> float: ...\n \n assert myfunc(table.a, table.b).type().is_floating()\n \n@@ -117,8 +116,7 @@ def test_udf_from_annotations(dec, table):\n )\n def test_udf_from_sig(dec, table):\n @dec(signature=((int, str), float))\n- def myfunc(x, y):\n- ...\n+ def myfunc(x, y): ...\n \n assert myfunc(table.a, table.b).type().is_floating()\n \n@@ -139,8 +137,7 @@ def test_udf_from_sig(dec, table):\n )\n def test_udf_deferred(dec, table):\n @dec\n- def myfunc(x: int) -> int:\n- ...\n+ def myfunc(x: int) -> int: ...\n \n expr = myfunc(_.a)\n assert isinstance(expr, Deferred)\n@@ -150,8 +147,7 @@ def test_udf_deferred(dec, table):\n \n def test_builtin_scalar_noargs():\n @ibis.udf.scalar.builtin\n- def version() -> str:\n- ...\n+ def version() -> str: ...\n \n expr = version()\n assert expr.type().is_string()\n", "utils.py": "@@ -104,18 +104,15 @@ class FlinkIntervalType(ABC):\n @classmethod\n @property\n @abstractmethod\n- def units(self):\n- ...\n+ def units(self): ...\n \n @classmethod\n @property\n @abstractmethod\n- def factors(self):\n- ...\n+ def factors(self): ...\n \n @abstractmethod\n- def _convert_to_highest_resolution(self):\n- ...\n+ def _convert_to_highest_resolution(self): ...\n \n def _convert_to_combined_units(self) -> dict:\n converted_total = self._convert_to_highest_resolution()\n@@ 
-132,12 +129,10 @@ class FlinkIntervalType(ABC):\n return interval_segments\n \n @abstractmethod\n- def _calculate_precisions(self) -> dict:\n- ...\n+ def _calculate_precisions(self) -> dict: ...\n \n @abstractmethod\n- def format_as_string(self, interval_segments: dict, precisions: dict) -> str:\n- ...\n+ def format_as_string(self, interval_segments: dict, precisions: dict) -> str: ...\n \n \n class YearsToMonthsInterval(FlinkIntervalType):\n", "udf.py": "@@ -30,9 +30,9 @@ if TYPE_CHECKING:\n EMPTY = inspect.Parameter.empty\n \n \n-_udf_name_cache: MutableMapping[\n- type[ops.Node], Iterable[int]\n-] = collections.defaultdict(itertools.count)\n+_udf_name_cache: MutableMapping[type[ops.Node], Iterable[int]] = (\n+ collections.defaultdict(itertools.count)\n+)\n \n \n def _make_udf_name(name: str) -> str:\n@@ -173,8 +173,7 @@ class scalar(_UDF):\n \n @overload\n @classmethod\n- def builtin(cls, fn: Callable) -> Callable[..., ir.Value]:\n- ...\n+ def builtin(cls, fn: Callable) -> Callable[..., ir.Value]: ...\n \n @overload\n @classmethod\n@@ -186,8 +185,7 @@ class scalar(_UDF):\n database: str | None = None,\n signature: tuple[tuple[Any, ...], Any] | None = None,\n **kwargs: Any,\n- ) -> Callable[[Callable], Callable[..., ir.Value]]:\n- ...\n+ ) -> Callable[[Callable], Callable[..., ir.Value]]: ...\n \n @util.experimental\n @classmethod\n@@ -243,8 +241,7 @@ class scalar(_UDF):\n \n @overload\n @classmethod\n- def python(cls, fn: Callable) -> Callable[..., ir.Value]:\n- ...\n+ def python(cls, fn: Callable) -> Callable[..., ir.Value]: ...\n \n @overload\n @classmethod\n@@ -256,8 +253,7 @@ class scalar(_UDF):\n database: str | None = None,\n signature: tuple[tuple[Any, ...], Any] | None = None,\n **kwargs: Any,\n- ) -> Callable[[Callable], Callable[..., ir.Value]]:\n- ...\n+ ) -> Callable[[Callable], Callable[..., ir.Value]]: ...\n \n @util.experimental\n @classmethod\n@@ -327,8 +323,7 @@ class scalar(_UDF):\n \n @overload\n @classmethod\n- def pandas(cls, fn: Callable) -> Callable[..., ir.Value]:\n- ...\n+ def pandas(cls, fn: Callable) -> Callable[..., ir.Value]: ...\n \n @overload\n @classmethod\n@@ -340,8 +335,7 @@ class scalar(_UDF):\n database: str | None = None,\n signature: tuple[tuple[Any, ...], Any] | None = None,\n **kwargs: Any,\n- ) -> Callable[[Callable], Callable[..., ir.Value]]:\n- ...\n+ ) -> Callable[[Callable], Callable[..., ir.Value]]: ...\n \n @util.experimental\n @classmethod\n@@ -400,8 +394,7 @@ class scalar(_UDF):\n \n @overload\n @classmethod\n- def pyarrow(cls, fn: Callable) -> Callable[..., ir.Value]:\n- ...\n+ def pyarrow(cls, fn: Callable) -> Callable[..., ir.Value]: ...\n \n @overload\n @classmethod\n@@ -413,8 +406,7 @@ class scalar(_UDF):\n database: str | None = None,\n signature: tuple[tuple[Any, ...], Any] | None = None,\n **kwargs: Any,\n- ) -> Callable[[Callable], Callable[..., ir.Value]]:\n- ...\n+ ) -> Callable[[Callable], Callable[..., ir.Value]]: ...\n \n @util.experimental\n @classmethod\n@@ -479,8 +471,7 @@ class agg(_UDF):\n \n @overload\n @classmethod\n- def builtin(cls, fn: Callable) -> Callable[..., ir.Value]:\n- ...\n+ def builtin(cls, fn: Callable) -> Callable[..., ir.Value]: ...\n \n @overload\n @classmethod\n@@ -492,8 +483,7 @@ class agg(_UDF):\n database: str | None = None,\n signature: tuple[tuple[Any, ...], Any] | None = None,\n **kwargs: Any,\n- ) -> Callable[[Callable], Callable[..., ir.Value]]:\n- ...\n+ ) -> Callable[[Callable], Callable[..., ir.Value]]: ...\n \n @util.experimental\n @classmethod\n", "test_geospatial.py": "@@ -1,4 +1,5 @@\n 
\"\"\"Tests for geo spatial data types.\"\"\"\n+\n from __future__ import annotations\n \n import numpy as np\n", "test_json.py": "@@ -1,4 +1,5 @@\n \"\"\"Tests for JSON operations.\"\"\"\n+\n from __future__ import annotations\n \n import sqlite3\n", "datatypes.py": "@@ -356,11 +356,7 @@ class SqlglotType(TypeMapper):\n return sge.DataType(this=getattr(typecode, geotype.upper()))\n return sge.DataType(this=typecode.GEOMETRY)\n \n- _from_ibis_Point = (\n- _from_ibis_LineString\n- ) = (\n- _from_ibis_Polygon\n- ) = (\n+ _from_ibis_Point = _from_ibis_LineString = _from_ibis_Polygon = (\n _from_ibis_MultiLineString\n ) = _from_ibis_MultiPoint = _from_ibis_MultiPolygon = _from_ibis_GeoSpatial\n \n@@ -734,11 +730,9 @@ class BigQueryType(SqlglotType):\n def _from_sqlglot_TINYINT(cls) -> dt.Int64:\n return dt.Int64(nullable=cls.default_nullable)\n \n- _from_sqlglot_UINT = (\n- _from_sqlglot_USMALLINT\n- ) = (\n- _from_sqlglot_UTINYINT\n- ) = _from_sqlglot_INT = _from_sqlglot_SMALLINT = _from_sqlglot_TINYINT\n+ _from_sqlglot_UINT = _from_sqlglot_USMALLINT = _from_sqlglot_UTINYINT = (\n+ _from_sqlglot_INT\n+ ) = _from_sqlglot_SMALLINT = _from_sqlglot_TINYINT\n \n @classmethod\n def _from_sqlglot_UBIGINT(cls) -> NoReturn:\n", "ddl.py": "@@ -26,12 +26,10 @@ def _is_quoted(x):\n class Base(ABC):\n @property\n @abstractmethod\n- def dialect(self):\n- ...\n+ def dialect(self): ...\n \n @abstractmethod\n- def compile(self):\n- ...\n+ def compile(self): ...\n \n def quote(self, ident):\n return sg.to_identifier(ident, quoted=True).sql(dialect=self.dialect)\n@@ -48,8 +46,7 @@ class Base(ABC):\n )\n \n @abstractmethod\n- def format_dtype(self, dtype):\n- ...\n+ def format_dtype(self, dtype): ...\n \n def format_schema(self, schema):\n elements = [\n", "rewrites.py": "@@ -1,4 +1,5 @@\n \"\"\"Some common rewrite functions to be shared between backends.\"\"\"\n+\n from __future__ import annotations\n \n import functools\n", "errors.py": "@@ -6,9 +6,9 @@ try:\n from duckdb import NotImplementedException as DuckDBNotImplementedException\n from duckdb import ParserException as DuckDBParserException\n except ImportError:\n- DuckDBConversionException = (\n- DuckDBInvalidInputException\n- ) = DuckDBParserException = DuckDBNotImplementedException = None\n+ DuckDBConversionException = DuckDBInvalidInputException = DuckDBParserException = (\n+ DuckDBNotImplementedException\n+ ) = None\n \n try:\n from clickhouse_connect.driver.exceptions import (\n@@ -21,9 +21,9 @@ try:\n OperationalError as ClickHouseOperationalError,\n )\n except ImportError:\n- ClickHouseDatabaseError = (\n- ClickHouseInternalError\n- ) = ClickHouseOperationalError = None\n+ ClickHouseDatabaseError = ClickHouseInternalError = ClickHouseOperationalError = (\n+ None\n+ )\n \n \n try:\n@@ -39,9 +39,9 @@ try:\n from pyspark.sql.utils import ParseException as PySparkParseException\n from pyspark.sql.utils import PythonException as PySparkPythonException\n except ImportError:\n- PySparkAnalysisException = (\n- PySparkIllegalArgumentException\n- ) = PySparkParseException = PySparkPythonException = None\n+ PySparkAnalysisException = PySparkIllegalArgumentException = (\n+ PySparkParseException\n+ ) = PySparkPythonException = None\n \n try:\n # PySpark 3.5.0\n@@ -106,17 +106,11 @@ try:\n from psycopg2.errors import SyntaxError as PsycoPg2SyntaxError\n from psycopg2.errors import UndefinedObject as PsycoPg2UndefinedObject\n except ImportError:\n- PsycoPg2SyntaxError = (\n- PsycoPg2IndeterminateDatatype\n- ) = (\n+ PsycoPg2SyntaxError = 
PsycoPg2IndeterminateDatatype = (\n PsycoPg2InvalidTextRepresentation\n- ) = (\n- PsycoPg2DivisionByZero\n- ) = (\n- PsycoPg2InternalError\n- ) = (\n- PsycoPg2ProgrammingError\n- ) = PsycoPg2OperationalError = PsycoPg2UndefinedObject = None\n+ ) = PsycoPg2DivisionByZero = PsycoPg2InternalError = PsycoPg2ProgrammingError = (\n+ PsycoPg2OperationalError\n+ ) = PsycoPg2UndefinedObject = None\n \n try:\n from pymysql.err import NotSupportedError as MySQLNotSupportedError\n", "test_interactive.py": "@@ -47,8 +47,9 @@ def test_repr_png_is_none_in_interactive(table):\n def test_repr_png_is_not_none_in_not_interactive(table):\n pytest.importorskip(\"ibis.expr.visualize\")\n \n- with config.option_context(\"interactive\", False), config.option_context(\n- \"graphviz_repr\", True\n+ with (\n+ config.option_context(\"interactive\", False),\n+ config.option_context(\"graphviz_repr\", True),\n ):\n assert table._repr_png_() is not None\n \n", "bases.py": "@@ -128,8 +128,7 @@ class Final(Abstract):\n @collections.abc.Hashable.register\n class Hashable(Abstract):\n @abstractmethod\n- def __hash__(self) -> int:\n- ...\n+ def __hash__(self) -> int: ...\n \n \n class Comparable(Abstract):\n@@ -154,8 +153,7 @@ class Comparable(Abstract):\n return NotImplemented\n \n @abstractmethod\n- def __equals__(self, other) -> bool:\n- ...\n+ def __equals__(self, other) -> bool: ...\n \n def __cached_equals__(self, other) -> bool:\n if self is other:\n", "collections.py": "@@ -28,8 +28,7 @@ class Iterable(Abstract, Generic[V]):\n \"\"\"Iterable abstract base class for quicker isinstance checks.\"\"\"\n \n @abstractmethod\n- def __iter__(self):\n- ...\n+ def __iter__(self): ...\n \n \n @collections.abc.Reversible.register\n@@ -37,8 +36,7 @@ class Reversible(Iterable[V]):\n \"\"\"Reverse iterable abstract base class for quicker isinstance checks.\"\"\"\n \n @abstractmethod\n- def __reversed__(self):\n- ...\n+ def __reversed__(self): ...\n \n \n @collections.abc.Iterator.register\n@@ -46,8 +44,7 @@ class Iterator(Iterable[V]):\n \"\"\"Iterator abstract base class for quicker isinstance checks.\"\"\"\n \n @abstractmethod\n- def __next__(self):\n- ...\n+ def __next__(self): ...\n \n def __iter__(self):\n return self\n@@ -58,8 +55,7 @@ class Sized(Abstract):\n \"\"\"Sized abstract base class for quicker isinstance checks.\"\"\"\n \n @abstractmethod\n- def __len__(self):\n- ...\n+ def __len__(self): ...\n \n \n @collections.abc.Container.register\n@@ -67,8 +63,7 @@ class Container(Abstract, Generic[V]):\n \"\"\"Container abstract base class for quicker isinstance checks.\"\"\"\n \n @abstractmethod\n- def __contains__(self, x):\n- ...\n+ def __contains__(self, x): ...\n \n \n @collections.abc.Collection.register\n@@ -81,8 +76,7 @@ class Sequence(Reversible[V], Collection[V]):\n \"\"\"Sequence abstract base class for quicker isinstance checks.\"\"\"\n \n @abstractmethod\n- def __getitem__(self, index):\n- ...\n+ def __getitem__(self, index): ...\n \n def __iter__(self):\n i = 0\n@@ -126,8 +120,7 @@ class Mapping(Collection[K], Generic[K, V]):\n \"\"\"Mapping abstract base class for quicker isinstance checks.\"\"\"\n \n @abstractmethod\n- def __getitem__(self, key):\n- ...\n+ def __getitem__(self, key): ...\n \n def get(self, key, default=None):\n try:\n", "deferred.py": "@@ -42,8 +42,7 @@ class Resolver(Coercible, Hashable):\n \"\"\"\n \n @abstractmethod\n- def __eq__(self, other: Resolver) -> bool:\n- ...\n+ def __eq__(self, other: Resolver) -> bool: ...\n \n @classmethod\n def __coerce__(cls, value):\n@@ -570,13 +569,11 @@ F = 
TypeVar(\"F\", bound=Callable)\n \n \n @overload\n-def deferrable(*, repr: str | None = None) -> Callable[[F], F]:\n- ...\n+def deferrable(*, repr: str | None = None) -> Callable[[F], F]: ...\n \n \n @overload\n-def deferrable(func: F) -> F:\n- ...\n+def deferrable(func: F) -> F: ...\n \n \n def deferrable(func=None, *, repr=None):\n", "graph.py": "@@ -1,4 +1,5 @@\n \"\"\"Various traversal utilities for the expression graph.\"\"\"\n+\n from __future__ import annotations\n \n from abc import abstractmethod\n", "patterns.py": "@@ -220,8 +220,7 @@ class Pattern(Hashable):\n return f\"matching {self!r}\"\n \n @abstractmethod\n- def __eq__(self, other: Pattern) -> bool:\n- ...\n+ def __eq__(self, other: Pattern) -> bool: ...\n \n def __invert__(self) -> Not:\n \"\"\"Syntax sugar for matching the inverse of the pattern.\"\"\"\n", "test_annotations.py": "@@ -179,8 +179,7 @@ def test_signature():\n \n \n def test_signature_from_callable():\n- def test(a: int, b: int, c: int = 1):\n- ...\n+ def test(a: int, b: int, c: int = 1): ...\n \n sig = Signature.from_callable(test)\n assert sig.validate(test, args=(2, 3), kwargs={}) == {\"a\": 2, \"b\": 3, \"c\": 1}\n@@ -194,8 +193,7 @@ def test_signature_from_callable():\n \n \n def test_signature_from_callable_with_varargs():\n- def test(a: int, b: int, *args: int):\n- ...\n+ def test(a: int, b: int, *args: int): ...\n \n sig = Signature.from_callable(test)\n assert sig.validate(test, args=(2, 3), kwargs={}) == {\"a\": 2, \"b\": 3, \"args\": ()}\n@@ -222,8 +220,7 @@ def test_signature_from_callable_with_varargs():\n \n \n def test_signature_from_callable_with_positional_only_arguments(snapshot):\n- def test(a: int, b: int, /, c: int = 1):\n- ...\n+ def test(a: int, b: int, /, c: int = 1): ...\n \n sig = Signature.from_callable(test)\n assert sig.validate(test, args=(2, 3), kwargs={}) == {\"a\": 2, \"b\": 3, \"c\": 1}\n@@ -240,8 +237,7 @@ def test_signature_from_callable_with_positional_only_arguments(snapshot):\n \n \n def test_signature_from_callable_with_keyword_only_arguments(snapshot):\n- def test(a: int, b: int, *, c: float, d: float = 0.0):\n- ...\n+ def test(a: int, b: int, *, c: float, d: float = 0.0): ...\n \n sig = Signature.from_callable(test)\n assert sig.validate(test, args=(2, 3), kwargs=dict(c=4.0)) == {\n@@ -476,8 +472,7 @@ def test_annotated_function_with_varkwargs():\n \n def test_multiple_validation_failures():\n @annotated\n- def test(a: float, b: float, *args: int, **kwargs: int):\n- ...\n+ def test(a: float, b: float, *args: int, **kwargs: int): ...\n \n with pytest.raises(ValidationError) as excinfo:\n test(1.0, 2.0, 3.0, 4, c=5.0, d=6)\n", "test_bases.py": "@@ -30,13 +30,11 @@ def test_classes_are_based_on_abstract():\n def test_abstract():\n class Foo(Abstract):\n @abstractmethod\n- def foo(self):\n- ...\n+ def foo(self): ...\n \n @property\n @abstractmethod\n- def bar(self):\n- ...\n+ def bar(self): ...\n \n assert not issubclass(type(Foo), ABCMeta)\n assert issubclass(type(Foo), AbstractMeta)\n", "test_patterns.py": "@@ -329,11 +329,9 @@ def test_generic_coerced_to():\n def __coerce__(cls, value, T=..., S=...):\n return cls(value, Scalar())\n \n- def dtype(self) -> T:\n- ...\n+ def dtype(self) -> T: ...\n \n- def shape(self) -> S:\n- ...\n+ def shape(self) -> S: ...\n \n class Literal(Value[T, Scalar]):\n __slots__ = (\"_value\", \"_dtype\")\n", "test_typing.py": "@@ -24,16 +24,14 @@ class My(Generic[T, S, U]):\n c: str\n \n @property\n- def d(self) -> Optional[str]:\n- ...\n+ def d(self) -> Optional[str]: ...\n \n @property\n 
def e(self) -> U: # type: ignore\n ...\n \n \n-class MyChild(My):\n- ...\n+class MyChild(My): ...\n \n \n def example(a: int, b: str) -> str: # type: ignore\n@@ -86,16 +84,13 @@ class A(Generic[T, S, U]):\n ...\n \n \n-class B(A[T, S, bytes]):\n- ...\n+class B(A[T, S, bytes]): ...\n \n \n-class C(B[T, str]):\n- ...\n+class C(B[T, str]): ...\n \n \n-class D(C[bool]):\n- ...\n+class D(C[bool]): ...\n \n \n def test_get_type_params() -> None:\n", "typing.py": "@@ -91,16 +91,10 @@ def get_type_params(obj: Any) -> dict[str, type]:\n Examples\n --------\n >>> from typing import Dict, List\n- >>>\n- >>> class MyList(List[T]):\n- ... ...\n- >>>\n+ >>> class MyList(List[T]): ...\n >>> get_type_params(MyList[int])\n {'T': <class 'int'>}\n- >>>\n- >>> class MyDict(Dict[T, U]):\n- ... ...\n- >>>\n+ >>> class MyDict(Dict[T, U]): ...\n >>> get_type_params(MyDict[int, str])\n {'T': <class 'int'>, 'U': <class 'str'>}\n \n@@ -146,8 +140,7 @@ def get_bound_typevars(obj: Any) -> dict[TypeVar, tuple[str, type]]:\n ... a: T\n ...\n ... @property\n- ... def myprop(self) -> U:\n- ... ...\n+ ... def myprop(self) -> U: ...\n >>> get_bound_typevars(MyStruct[float, bytes])\n {~T: ('a', <class 'float'>), ~U: ('myprop', <class 'bytes'>)}\n \n@@ -252,8 +245,7 @@ class Sentinel(type):\n raise TypeError(\"Sentinels are not constructible\")\n \n \n-class CoercionError(Exception):\n- ...\n+class CoercionError(Exception): ...\n \n \n class Coercible(Abstract):\n@@ -266,5 +258,4 @@ class Coercible(Abstract):\n \n @classmethod\n @abstractmethod\n- def __coerce__(cls, value: Any, **kwargs: Any) -> Self:\n- ...\n+ def __coerce__(cls, value: Any, **kwargs: Any) -> Self: ...\n", "api.py": "@@ -745,13 +745,11 @@ def timestamp(\n second: int | ir.IntegerValue | Deferred,\n /,\n timezone: str | None = None,\n-) -> TimestampValue:\n- ...\n+) -> TimestampValue: ...\n \n \n @overload\n-def timestamp(value_or_year: Any, /, timezone: str | None = None) -> TimestampValue:\n- ...\n+def timestamp(value_or_year: Any, /, timezone: str | None = None) -> TimestampValue: ...\n \n \n @deferrable\n@@ -845,13 +843,11 @@ def date(\n month: int | ir.IntegerValue | Deferred,\n day: int | ir.IntegerValue | Deferred,\n /,\n-) -> DateValue:\n- ...\n+) -> DateValue: ...\n \n \n @overload\n-def date(value_or_year: Any, /) -> DateValue:\n- ...\n+def date(value_or_year: Any, /) -> DateValue: ...\n \n \n @deferrable\n@@ -916,13 +912,11 @@ def time(\n minute: int | ir.IntegerValue | Deferred,\n second: int | ir.IntegerValue | Deferred,\n /,\n-) -> TimeValue:\n- ...\n+) -> TimeValue: ...\n \n \n @overload\n-def time(value_or_hour: Any, /) -> TimeValue:\n- ...\n+def time(value_or_hour: Any, /) -> TimeValue: ...\n \n \n @deferrable\n", "core.py": "@@ -102,13 +102,11 @@ class DataType(Concrete, Coercible):\n \n @property\n @abstractmethod\n- def scalar(self):\n- ...\n+ def scalar(self): ...\n \n @property\n @abstractmethod\n- def column(self):\n- ...\n+ def column(self): ...\n \n # TODO(kszucs): remove it, prefer to use Annotable.__repr__ instead\n @property\n", "window.py": "@@ -86,13 +86,11 @@ class WindowFrame(Value):\n \n @property\n @abstractmethod\n- def start(self):\n- ...\n+ def start(self): ...\n \n @property\n @abstractmethod\n- def end(self):\n- ...\n+ def end(self): ...\n \n \n @public\n", "pandas.py": "@@ -176,13 +176,9 @@ class PandasData(DataMapper):\n return gpd.GeoSeries(s)\n return gpd.GeoSeries.from_wkb(s)\n \n- convert_Point = (\n- convert_LineString\n- ) = (\n- convert_Polygon\n- ) = (\n- convert_MultiLineString\n- ) = convert_MultiPoint = 
convert_MultiPolygon = convert_GeoSpatial\n+ convert_Point = convert_LineString = convert_Polygon = convert_MultiLineString = (\n+ convert_MultiPoint\n+ ) = convert_MultiPolygon = convert_GeoSpatial\n \n @classmethod\n def convert_default(cls, s, dtype, pandas_type):\n", "test_util.py": "@@ -1,4 +1,5 @@\n \"\"\"Test ibis.util utilities.\"\"\"\n+\n from __future__ import annotations\n \n import pytest\n", "util.py": "@@ -1,4 +1,5 @@\n \"\"\"Ibis utility functions.\"\"\"\n+\n from __future__ import annotations\n \n import base64\n@@ -43,7 +44,7 @@ V = TypeVar(\"V\")\n \n \n # https://www.compart.com/en/unicode/U+22EE\n-VERTICAL_ELLIPSIS = \"\\u22EE\"\n+VERTICAL_ELLIPSIS = \"\\u22ee\"\n # https://www.compart.com/en/unicode/U+2026\n HORIZONTAL_ELLIPSIS = \"\\u2026\"\n \n", "justfile": "@@ -26,7 +26,7 @@ lock:\n # format code\n fmt:\n ruff format .\n- ruff --fix .\n+ ruff check --fix .\n \n # run all non-backend tests; additional arguments are forwarded to pytest\n check *args:\n", "poetry.lock": "@@ -5924,28 +5924,28 @@ pyasn1 = \">=0.1.3\"\n \n [[package]]\n name = \"ruff\"\n-version = \"0.2.2\"\n+version = \"0.3.0\"\n description = \"An extremely fast Python linter and code formatter, written in Rust.\"\n optional = false\n python-versions = \">=3.7\"\n files = [\n- {file = \"ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl\", hash = \"sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6\"},\n- {file = \"ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl\", hash = \"sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39\"},\n- {file = \"ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73\"},\n- {file = \"ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba\"},\n- {file = \"ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c\"},\n- {file = \"ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl\", hash = \"sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e\"},\n- {file = \"ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca\"},\n- {file = \"ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001\"},\n- {file = \"ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3\"},\n- {file = \"ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl\", hash = \"sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726\"},\n- {file = \"ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl\", hash = \"sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e\"},\n- {file = \"ruff-0.2.2-py3-none-musllinux_1_2_i686.whl\", hash = \"sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e\"},\n- {file = \"ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl\", hash = \"sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9\"},\n- {file = \"ruff-0.2.2-py3-none-win32.whl\", hash = \"sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325\"},\n- {file = \"ruff-0.2.2-py3-none-win_amd64.whl\", hash 
= \"sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d\"},\n- {file = \"ruff-0.2.2-py3-none-win_arm64.whl\", hash = \"sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd\"},\n- {file = \"ruff-0.2.2.tar.gz\", hash = \"sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d\"},\n+ {file = \"ruff-0.3.0-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl\", hash = \"sha256:7deb528029bacf845bdbb3dbb2927d8ef9b4356a5e731b10eef171e3f0a85944\"},\n+ {file = \"ruff-0.3.0-py3-none-macosx_10_12_x86_64.whl\", hash = \"sha256:e1e0d4381ca88fb2b73ea0766008e703f33f460295de658f5467f6f229658c19\"},\n+ {file = \"ruff-0.3.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:2f7dbba46e2827dfcb0f0cc55fba8e96ba7c8700e0a866eb8cef7d1d66c25dcb\"},\n+ {file = \"ruff-0.3.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:23dbb808e2f1d68eeadd5f655485e235c102ac6f12ad31505804edced2a5ae77\"},\n+ {file = \"ruff-0.3.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:3ef655c51f41d5fa879f98e40c90072b567c666a7114fa2d9fe004dffba00932\"},\n+ {file = \"ruff-0.3.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl\", hash = \"sha256:d0d3d7ef3d4f06433d592e5f7d813314a34601e6c5be8481cccb7fa760aa243e\"},\n+ {file = \"ruff-0.3.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:b08b356d06a792e49a12074b62222f9d4ea2a11dca9da9f68163b28c71bf1dd4\"},\n+ {file = \"ruff-0.3.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:9343690f95710f8cf251bee1013bf43030072b9f8d012fbed6ad702ef70d360a\"},\n+ {file = \"ruff-0.3.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:a1f3ed501a42f60f4dedb7805fa8d4534e78b4e196f536bac926f805f0743d49\"},\n+ {file = \"ruff-0.3.0-py3-none-musllinux_1_2_aarch64.whl\", hash = \"sha256:cc30a9053ff2f1ffb505a585797c23434d5f6c838bacfe206c0e6cf38c921a1e\"},\n+ {file = \"ruff-0.3.0-py3-none-musllinux_1_2_armv7l.whl\", hash = \"sha256:5da894a29ec018a8293d3d17c797e73b374773943e8369cfc50495573d396933\"},\n+ {file = \"ruff-0.3.0-py3-none-musllinux_1_2_i686.whl\", hash = \"sha256:755c22536d7f1889be25f2baf6fedd019d0c51d079e8417d4441159f3bcd30c2\"},\n+ {file = \"ruff-0.3.0-py3-none-musllinux_1_2_x86_64.whl\", hash = \"sha256:dd73fe7f4c28d317855da6a7bc4aa29a1500320818dd8f27df95f70a01b8171f\"},\n+ {file = \"ruff-0.3.0-py3-none-win32.whl\", hash = \"sha256:19eacceb4c9406f6c41af806418a26fdb23120dfe53583df76d1401c92b7c14b\"},\n+ {file = \"ruff-0.3.0-py3-none-win_amd64.whl\", hash = \"sha256:128265876c1d703e5f5e5a4543bd8be47c73a9ba223fd3989d4aa87dd06f312f\"},\n+ {file = \"ruff-0.3.0-py3-none-win_arm64.whl\", hash = \"sha256:e3a4a6d46aef0a84b74fcd201a4401ea9a6cd85614f6a9435f2d33dd8cefbf83\"},\n+ {file = \"ruff-0.3.0.tar.gz\", hash = \"sha256:0886184ba2618d815067cf43e005388967b67ab9c80df52b32ec1152ab49f53a\"},\n ]\n \n [[package]]\n", "requirements-dev.txt": "@@ -220,7 +220,7 @@ requests[socks]==2.31.0 ; python_version >= \"3.10\" and python_version < \"3.13\"\n rich==13.7.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n rpds-py==0.18.0 ; python_version >= \"3.10\" and python_version < \"3.13\"\n rsa==4.9 ; python_version >= \"3.9\" and python_version < \"4\"\n-ruff==0.2.2 ; python_version >= \"3.9\" and python_version < \"4.0\"\n+ruff==0.3.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n scikit-learn==1.4.1.post1 ; python_version >= \"3.10\" and python_version < \"3.13\"\n scipy==1.12.0 
; python_version >= \"3.10\" and python_version < \"3.13\"\n scooby==0.9.2 ; python_version >= \"3.10\" and python_version < \"3.13\"\n"}
fix: require generic type of ChangeSet (fixes older TS compatibility)
d8503d7bacc4fd369e55814c416c07c8de0ef125
fix
https://github.com/mikro-orm/mikro-orm/commit/d8503d7bacc4fd369e55814c416c07c8de0ef125
require generic type of ChangeSet (fixes older TS compatibility)
{"Entity.ts": "@@ -52,7 +52,7 @@ export type EntityData<T extends IEntityType<T>> = { [P in keyof T]?: T[P] | IPr\n export interface EntityProperty {\n name: string;\n fk: string;\n- entity: () => string | Function;\n+ entity: () => EntityName<IEntity>;\n type: string;\n primary: boolean;\n reference: ReferenceType;\n", "ChangeSet.ts": "@@ -1,7 +1,6 @@\n import { EntityData, IEntityType } from '../decorators';\n \n-export interface ChangeSet<T extends IEntityType<T> = IEntityType<any>> {\n- index: number;\n+export interface ChangeSet<T extends IEntityType<T>> {\n name: string;\n collection: string;\n delete: boolean;\n", "ChangeSetComputer.ts": "@@ -1,6 +1,6 @@\n import { Utils } from '../utils';\n import { MetadataStorage } from '../metadata';\n-import { EntityProperty, IEntity } from '../decorators';\n+import { EntityData, EntityProperty, IEntity, IEntityType } from '../decorators';\n import { ChangeSet } from './ChangeSet';\n import { Collection, EntityIdentifier, EntityValidator, ReferenceType } from '../entity';\n \n@@ -9,11 +9,11 @@ export class ChangeSetComputer {\n private readonly metadata = MetadataStorage.getMetadata();\n \n constructor(private readonly validator: EntityValidator,\n- private readonly originalEntityData: Record<string, IEntity>,\n+ private readonly originalEntityData: Record<string, EntityData<IEntity>>,\n private readonly identifierMap: Record<string, EntityIdentifier>) { }\n \n- computeChangeSet(entity: IEntity): ChangeSet | null {\n- const changeSet = { entity } as ChangeSet;\n+ computeChangeSet<T extends IEntityType<T>>(entity: T): ChangeSet<T> | null {\n+ const changeSet = { entity } as ChangeSet<T>;\n const meta = this.metadata[entity.constructor.name];\n \n changeSet.name = meta.name;\n@@ -33,32 +33,32 @@ export class ChangeSetComputer {\n return changeSet;\n }\n \n- private computePayload(entity: IEntity): Record<string, any> {\n+ private computePayload<T extends IEntityType<T>>(entity: T): EntityData<T> {\n if (entity.id && this.originalEntityData[entity.uuid]) {\n- return Utils.diffEntities(this.originalEntityData[entity.uuid], entity);\n+ return Utils.diffEntities<T>(this.originalEntityData[entity.uuid] as T, entity);\n } else {\n return Utils.prepareEntity(entity);\n }\n }\n \n- private processReference(changeSet: ChangeSet, prop: EntityProperty): void {\n+ private processReference<T extends IEntityType<T>>(changeSet: ChangeSet<T>, prop: EntityProperty): void {\n if (prop.reference === ReferenceType.MANY_TO_MANY && prop.owner) {\n- this.processManyToMany(changeSet, prop, changeSet.entity[prop.name]);\n- } else if (prop.reference === ReferenceType.MANY_TO_ONE && changeSet.entity[prop.name]) {\n+ this.processManyToMany(changeSet, prop, changeSet.entity[prop.name as keyof T]);\n+ } else if (prop.reference === ReferenceType.MANY_TO_ONE && changeSet.entity[prop.name as keyof T]) {\n this.processManyToOne(prop, changeSet);\n }\n }\n \n- private processManyToOne(prop: EntityProperty, changeSet: ChangeSet): void {\n+ private processManyToOne<T extends IEntityType<T>>(prop: EntityProperty, changeSet: ChangeSet<T>): void {\n const pk = this.metadata[prop.type].primaryKey;\n- const entity = changeSet.entity[prop.name];\n+ const entity = changeSet.entity[prop.name as keyof T];\n \n if (!entity[pk]) {\n changeSet.payload[prop.name] = this.identifierMap[entity.uuid];\n }\n }\n \n- private processManyToMany(changeSet: ChangeSet, prop: EntityProperty, collection: Collection<IEntity>): void {\n+ private processManyToMany<T extends IEntityType<T>>(changeSet: 
ChangeSet<T>, prop: EntityProperty, collection: Collection<IEntity>): void {\n if (prop.owner && collection.isDirty()) {\n const pk = this.metadata[prop.type].primaryKey as keyof IEntity;\n changeSet.payload[prop.name] = collection.getItems().map(item => item[pk] || this.identifierMap[item.uuid]);\n", "UnitOfWork.ts": "@@ -21,7 +21,7 @@ export class UnitOfWork {\n \n private readonly persistStack: IEntity[] = [];\n private readonly removeStack: IEntity[] = [];\n- private readonly changeSets: ChangeSet[] = [];\n+ private readonly changeSets: ChangeSet<IEntity>[] = [];\n private readonly metadata = MetadataStorage.getMetadata();\n private readonly changeSetComputer = new ChangeSetComputer(this.em.getValidator(), this.originalEntityData, this.identifierMap);\n private readonly changeSetPersister = new ChangeSetPersister(this.em.getDriver(), this.identifierMap);\n@@ -121,7 +121,7 @@ export class UnitOfWork {\n \n for (const entity of Object.values(this.removeStack)) {\n const meta = this.metadata[entity.constructor.name];\n- this.changeSets.push({ entity, delete: true, name: meta.name, collection: meta.collection, payload: {} } as ChangeSet);\n+ this.changeSets.push({ entity, delete: true, name: meta.name, collection: meta.collection, payload: {} } as ChangeSet<IEntity>);\n }\n }\n \n", "Utils.ts": "@@ -2,7 +2,7 @@ import * as fastEqual from 'fast-deep-equal';\n import * as clone from 'clone';\n \n import { MetadataStorage } from '../metadata';\n-import { EntityMetadata, IEntity, IEntityType, IPrimaryKey } from '../decorators';\n+import { EntityData, EntityMetadata, IEntity, IEntityType, IPrimaryKey } from '../decorators';\n import { ArrayCollection } from '../entity';\n \n export class Utils {\n@@ -47,7 +47,7 @@ export class Utils {\n return Utils.merge(target, ...sources);\n }\n \n- static diff(a: Record<string, any>, b: Record<string, any>): Record<string, any> {\n+ static diff(a: Record<string, any>, b: Record<string, any>): Record<keyof (typeof a & typeof b), any> {\n const ret: Record<string, any> = {};\n \n Object.keys(b).forEach(k => {\n@@ -61,11 +61,11 @@ export class Utils {\n return ret;\n }\n \n- static diffEntities(a: IEntity, b: IEntity): Record<string, any> {\n- return Utils.diff(Utils.prepareEntity(a), Utils.prepareEntity(b));\n+ static diffEntities<T extends IEntityType<T>>(a: T, b: T): EntityData<T> {\n+ return Utils.diff(Utils.prepareEntity(a), Utils.prepareEntity(b)) as EntityData<T>;\n }\n \n- static prepareEntity<T>(e: IEntityType<T>): Record<string, any> {\n+ static prepareEntity<T extends IEntityType<T>>(e: T): EntityData<T> {\n const metadata = MetadataStorage.getMetadata();\n const meta = metadata[e.constructor.name];\n const ret = Utils.copy(e);\n"}
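The mikro-orm record above tightens the `ChangeSet` interface: the self-referential default type parameter (`= IEntityType<any>`) is removed, so every call site must name its entity type, which is what the commit credits for restoring older-TypeScript compatibility. A minimal editorial sketch of the shape of that change, using simplified stand-in types rather than the library's own:

```ts
// Simplified stand-ins; the real code constrains T as `T extends IEntityType<T>`.
interface IEntity { id: number; uuid: string; }
type EntityData<T> = { [K in keyof T]?: T[K] };

// Before (per the diff): `interface ChangeSet<T extends IEntityType<T> = IEntityType<any>>`
// could be used bare as `ChangeSet`. After: the parameter is required.
interface ChangeSet<T extends IEntity> {
  name: string;
  collection: string;
  delete: boolean;
  entity: T;
  payload: EntityData<T>;
}

// Call sites now spell the entity type out, as the UnitOfWork diff does:
const changeSets: ChangeSet<IEntity>[] = [];
changeSets.push({
  name: "Author",
  collection: "author",
  delete: false,
  entity: { id: 1, uuid: "a" },
  payload: {},
});
```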
fix: avoid observing animated parents until first observer. Since "To" and "SpringTransform" objects are typically created during render, they can't start observing their inputs until the commit phase.
680e1e4c66821794ee4faff28fc296096a7d77ba
fix
https://github.com/pmndrs/react-spring/commit/680e1e4c66821794ee4faff28fc296096a7d77ba
avoid observing animated parents until first observer. Since "To" and "SpringTransform" objects are typically created during render, they can't start observing their inputs until the commit phase.
{"To.ts": "@@ -28,10 +28,6 @@ export class To<In = any, Out = any> extends SpringValue<Out, 'to'> {\n \n // By default, update immediately when a source changes.\n this.animation = { owner: this, immediate: true } as any\n- each(toArray(source), source => {\n- this.priority = Math.max(this.priority || 0, (source.priority || 0) + 1)\n- source.addChild(this)\n- })\n }\n \n protected _animateTo(value: Out | FluidValue<Out>) {\n@@ -50,6 +46,32 @@ export class To<In = any, Out = any> extends SpringValue<Out, 'to'> {\n return this.calc(...inputs)\n }\n \n+ /** @internal */\n+ addChild(observer: FluidObserver<Out>) {\n+ // Start observing our \"source\" once we have an observer.\n+ if (!this._children.size) {\n+ let priority = 0\n+ each(toArray(this.source), source => {\n+ priority = Math.max(priority, (source.priority || 0) + 1)\n+ source.addChild(this)\n+ })\n+ this._setPriority(priority)\n+ }\n+\n+ super.addChild(observer)\n+ }\n+\n+ removeChild(observer: FluidObserver<Out>) {\n+ super.removeChild(observer)\n+\n+ // Stop observing our \"source\" once we have no observers.\n+ if (!this._children.size) {\n+ each(toArray(this.source), source => {\n+ source.removeChild(this)\n+ })\n+ }\n+ }\n+\n /** @internal */\n onParentChange(_value: any, finished: boolean) {\n // TODO: only compute once per frame\n", "AnimatedStyle.ts": "@@ -48,65 +48,27 @@ const isValueIdentity = (value: OneOrMore<Value>, id: number): boolean =>\n const getValue = <T>(value: T | FluidValue<T>) =>\n isFluidValue(value) ? value.get() : value\n \n+type Inputs = (Value | FluidValue<Value>)[][]\n+type Transforms = ((value: any) => [string, boolean])[]\n+\n /**\n * This AnimatedStyle will simplify animated components transforms by\n * interpolating all transform function passed as keys in the style object\n * including shortcuts such as x, y and z for translateX/Y/Z\n */\n export class AnimatedStyle extends AnimatedObject {\n- constructor(style: Indexable) {\n- style.transform = new SpringTransform(style)\n- super(style)\n- }\n-}\n-\n-class SpringTransform extends SpringValue<string, 'transform'> {\n- /**\n- * An array of arrays that contains the values (static or fluid)\n- * used by each transform function.\n- */\n- private _inputs: (Value | FluidValue<Value>)[][] = []\n-\n- /**\n- * An array of functions that take a list of values (static or fluid)\n- * and returns (1) a CSS transform string and (2) a boolean that's true\n- * when the transform has no effect (eg: an identity transform).\n- */\n- private _transforms: ((value: any) => [string, boolean])[] = []\n-\n- constructor(style: Indexable) {\n- super('transform')\n- this._parseStyle(style)\n- this.node = new AnimatedValue(this._getValue())\n- each(this._inputs, input =>\n- each(input, value => isFluidValue(value) && value.addChild(this))\n- )\n- }\n-\n- dispose() {\n- each(this._inputs, input =>\n- each(input, value => isFluidValue(value) && value.removeChild(this))\n- )\n- super.dispose()\n- }\n-\n- /** @internal */\n- onParentChange() {\n- // TODO: only call \"_getValue\" once per frame max\n- this.set(this._getValue())\n- }\n-\n- /** @internal */\n- removeChild(observer: SpringObserver<string>) {\n- super.removeChild(observer)\n- if (!this._children.size) {\n- this.dispose()\n- }\n- }\n-\n- protected _parseStyle({ x, y, z, ...style }: Indexable) {\n- const inputs = this._inputs\n- const transforms = this._transforms\n+ constructor({ x, y, z, ...style }: Indexable) {\n+ /**\n+ * An array of arrays that contains the values (static or fluid)\n+ * used by each transform 
function.\n+ */\n+ const inputs: Inputs = []\n+ /**\n+ * An array of functions that take a list of values (static or fluid)\n+ * and returns (1) a CSS transform string and (2) a boolean that's true\n+ * when the transform has no effect (eg: an identity transform).\n+ */\n+ const transforms: Transforms = []\n \n // Combine x/y/z into translate3d\n if (x || y || z) {\n@@ -146,13 +108,56 @@ class SpringTransform extends SpringValue<string, 'transform'> {\n )\n }\n })\n+\n+ if (inputs.length) {\n+ style.transform = new SpringTransform(inputs, transforms)\n+ }\n+\n+ super(style)\n+ }\n+}\n+\n+class SpringTransform extends SpringValue<string, 'transform'> {\n+ constructor(readonly inputs: Inputs, readonly transforms: Transforms) {\n+ super('transform')\n+ this.node = new AnimatedValue(this._compute())\n+ }\n+\n+ /** @internal */\n+ onParentChange() {\n+ // TODO: only compute once per frame max\n+ this.set(this._compute())\n+ }\n+\n+ /** @internal */\n+ addChild(observer: SpringObserver<string>) {\n+ // Start observing our inputs once we have an observer.\n+ if (!this._children.size) {\n+ each(this.inputs, input =>\n+ each(input, value => isFluidValue(value) && value.addChild(this))\n+ )\n+ }\n+\n+ super.addChild(observer)\n+ }\n+\n+ /** @internal */\n+ removeChild(observer: SpringObserver<string>) {\n+ super.removeChild(observer)\n+\n+ // Stop observing our inputs once we have no observers.\n+ if (!this._children.size) {\n+ each(this.inputs, input =>\n+ each(input, value => isFluidValue(value) && value.removeChild(this))\n+ )\n+ }\n }\n \n- protected _getValue() {\n+ protected _compute() {\n let transform = ''\n let identity = true\n- each(this._inputs, (input, i) => {\n- const [t, id] = this._transforms[i](input.map(getValue))\n+ each(this.inputs, (input, i) => {\n+ const [t, id] = this.transforms[i](input.map(getValue))\n transform += ' ' + t\n identity = identity && id\n })\n"}
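The react-spring record above makes derived values lazy about subscribing: because `To` and `SpringTransform` are constructed during render, they must not attach to their sources until the commit phase, so `addChild` wires up the sources when the first observer arrives and `removeChild` tears them down when the last one leaves. A minimal sketch of that pattern with hypothetical stand-in types (the library's `FluidValue`/`FluidObserver` are richer):

```ts
// Hypothetical minimal observer types for illustration only.
interface Observer<T> { onParentChange(value: T): void; }

class FluidValue<T> {
  private children = new Set<Observer<T>>();
  constructor(private value: T) {}
  get(): T { return this.value; }
  set(next: T) { this.value = next; this.children.forEach(c => c.onParentChange(next)); }
  addChild(o: Observer<T>) { this.children.add(o); }
  removeChild(o: Observer<T>) { this.children.delete(o); }
}

// Derived value that only observes its sources while it has observers itself,
// mirroring the To/SpringTransform overrides in the diff above.
class Derived<In, Out> implements Observer<In> {
  private children = new Set<Observer<Out>>();
  constructor(
    private sources: FluidValue<In>[],
    private calc: (...inputs: In[]) => Out,
  ) {}

  get(): Out { return this.calc(...this.sources.map(s => s.get())); }

  onParentChange(_value: In) {
    const out = this.get();
    this.children.forEach(c => c.onParentChange(out));
  }

  addChild(o: Observer<Out>) {
    // Start observing sources once the first observer arrives (commit phase),
    // not when the object is constructed during render.
    if (this.children.size === 0) this.sources.forEach(s => s.addChild(this));
    this.children.add(o);
  }

  removeChild(o: Observer<Out>) {
    this.children.delete(o);
    // Stop observing sources once the last observer leaves.
    if (this.children.size === 0) this.sources.forEach(s => s.removeChild(this));
  }
}

// Constructed during render: no subscription happens yet.
const x = new FluidValue(1);
const doubled = new Derived([x], (v: number) => v * 2);
// First observer (commit phase) triggers subscription to `x`.
doubled.addChild({ onParentChange: v => console.log("doubled ->", v) });
x.set(3); // logs "doubled -> 6"
```

In the actual diff the same first/last-observer guard appears in both `To` and `SpringTransform`, and `To` additionally raises its priority above its sources (`Math.max(priority, (source.priority || 0) + 1)`) inside the first-observer branch.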
chore(release): v5.7.0 [skip ci]
ce06eec2daa69087db51db7e868dd1bd9b7d1afa
chore
https://github.com/mikro-orm/mikro-orm/commit/ce06eec2daa69087db51db7e868dd1bd9b7d1afa
v5.7.0 [skip ci]
{"CHANGELOG.md": "@@ -3,6 +3,14 @@\n All notable changes to this project will be documented in this file.\n See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.\n \n+# [5.7.0](https://github.com/mikro-orm/mikro-orm/compare/v5.6.16...v5.7.0) (2023-04-23)\n+\n+**Note:** Version bump only for package @mikro-orm/sqlite\n+\n+\n+\n+\n+\n ## [5.6.16](https://github.com/mikro-orm/mikro-orm/compare/v5.6.15...v5.6.16) (2023-04-04)\n \n \n", "lerna.json": "@@ -2,7 +2,7 @@\n \"packages\": [\n \"packages/*\"\n ],\n- \"version\": \"5.6.16\",\n+ \"version\": \"5.7.0\",\n \"command\": {\n \"version\": {\n \"conventionalCommits\": true,\n", "package.json": "@@ -1,6 +1,6 @@\n {\n \"name\": \"@mikro-orm/sqlite\",\n- \"version\": \"5.6.16\",\n+ \"version\": \"5.7.0\",\n \"description\": \"TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.\",\n \"main\": \"dist/index.js\",\n \"module\": \"dist/index.mjs\",\n@@ -58,13 +58,13 @@\n \"access\": \"public\"\n },\n \"dependencies\": {\n- \"@mikro-orm/knex\": \"~5.6.16\",\n+ \"@mikro-orm/knex\": \"^5.7.0\",\n \"fs-extra\": \"11.1.1\",\n \"sqlite3\": \"5.1.6\",\n \"sqlstring-sqlite\": \"0.1.1\"\n },\n \"devDependencies\": {\n- \"@mikro-orm/core\": \"^5.6.16\"\n+ \"@mikro-orm/core\": \"^5.7.0\"\n },\n \"peerDependencies\": {\n \"@mikro-orm/core\": \"^5.0.0\",\n", "yarn.lock": "Binary files a/yarn.lock and b/yarn.lock differ\n"}
docs: patch docs build to fix anchor links
51be4592473e99b20dcdb56d67c4fece40a0a1b0
docs
https://github.com/ibis-project/ibis/commit/51be4592473e99b20dcdb56d67c4fece40a0a1b0
patch docs build to fix anchor links
{"poetry-overrides.nix": "@@ -17,6 +17,21 @@ in\n });\n \n ipython-genutils = self.ipython_genutils;\n+\n+ mkdocs-jupyter =\n+ let\n+ linksPatch = self.pkgs.fetchpatch {\n+ name = \"fix-mkdocs-jupyter-heading-links.patch\";\n+ url = \"https://github.com/danielfrg/mkdocs-jupyter/commit/f3b517580132fc743a34e5d9947731bc4f3c2143.patch\";\n+ sha256 = \"sha256-qcNobdcIziX3pFfnm6vxnhTqow/2VGI/+jbBs9jXkUo=\";\n+ };\n+ in\n+ super.mkdocs-jupyter.overridePythonAttrs (_: {\n+ postFixup = ''\n+ cd $out/${self.python.sitePackages}\n+ patch -p1 < \"${linksPatch}\"\n+ '';\n+ });\n } // super.lib.listToAttrs (\n map\n (name: {\n"}
build: updating vanilla_new demo
a63ce94749cdd7e3a29c44c097017c3e1090b1c9
build
https://github.com/tsparticles/tsparticles/commit/a63ce94749cdd7e3a29c44c097017c3e1090b1c9
updating vanilla_new demo
{".gitignore": "@@ -0,0 +1,18 @@\n+js/tsparticles.min.js\n+js/tsparticles.interaction.light.min.js\n+js/tsparticles.interaction.particles.repulse.min.js\n+js/tsparticles.path.curves.min.js\n+js/tsparticles.path.perlin.noise.min.js\n+js/tsparticles.path.polygon.min.js\n+js/tsparticles.path.simplex.noise.min.js\n+js/tsparticles.plugin.infection.min.js\n+js/tsparticles.updater.gradient.min.js\n+js/tsparticles.updater.orbit.min.js\n+js/tsparticles.bundle.min.js\n+css/main.min.css\n+css/404.min.css\n+js/404.min.js\n+css/404.css.map\n+css/main.css.map\n+css/404.css\n+css/main.css\n", "404.json": "@@ -1,70 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": -1\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 50\n- },\n- \"color\": {\n- \"value\": [\n- \"#3998D0\",\n- \"#2EB6AF\",\n- \"#A9BD33\",\n- \"#FEC73B\",\n- \"#F89930\",\n- \"#F45623\",\n- \"#D62E32\",\n- \"#EB586E\",\n- \"#9952CF\"\n- ]\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.8,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 0.4\n- }\n- },\n- \"size\": {\n- \"value\": 400,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 200\n- },\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 100,\n- \"minimumValue\": 200,\n- \"sync\": false\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 10,\n- \"direction\": \"top\",\n- \"random\": false,\n- \"straight\": false,\n- \"outMode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#ffffff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n\\ No newline at end of file\n", "absorbers.json": "@@ -1,117 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 300,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": true\n- },\n- \"size\": {\n- \"value\": 2,\n- \"random\": true\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0.5,\n- \"direction\": \"top\",\n- \"random\": false,\n- \"straight\": true,\n- \"out_mode\": \"out\",\n- \"bounce\": true,\n- \"warp\": true,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"absorbers\": {\n- \"color\": \"#ff0000\",\n- \"draggable\": true,\n- \"size\": {\n- \"value\": 10,\n- \"limit\": 50,\n- \"density\": 50,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 5\n- }\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 10\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"absorbers\": {\n- \"draggable\": true,\n- \"size\": {\n- \"value\": 10,\n- \"limit\": 10,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 5\n- }\n- },\n- \"position\": {\n- \"x\": 
50,\n- \"y\": 50\n- }\n- },\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "amongUs.json": "@@ -1,197 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"groups\": {\n- \"z5000\": {\n- \"number\": {\n- \"value\": 70\n- },\n- \"zIndex\": {\n- \"value\": 50\n- }\n- },\n- \"z7500\": {\n- \"number\": {\n- \"value\": 30\n- },\n- \"zIndex\": {\n- \"value\": 75\n- }\n- },\n- \"z2500\": {\n- \"number\": {\n- \"value\": 50\n- },\n- \"zIndex\": {\n- \"value\": 25\n- }\n- },\n- \"z1000\": {\n- \"number\": {\n- \"value\": 40\n- },\n- \"zIndex\": {\n- \"value\": 10\n- }\n- }\n- },\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#fff\",\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3\n- },\n- \"move\": {\n- \"angle\": {\n- \"value\": 10,\n- \"offset\": 0\n- },\n- \"enable\": true,\n- \"speed\": 5,\n- \"direction\": \"right\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- },\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- },\n- \"zIndex\": {\n- \"value\": 5,\n- \"opacityRate\": 0.5\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"links\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"quantity\": 4,\n- \"groups\": [\n- \"z5000\",\n- \"z7500\",\n- \"z2500\",\n- \"z1000\"\n- ]\n- },\n- \"remove\": {\n- \"quantity\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- },\n- \"emitters\": {\n- \"position\": {\n- \"y\": 55,\n- \"x\": -5\n- },\n- \"rate\": {\n- \"delay\": 7,\n- \"quantity\": 1\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"particles\": {\n- \"shape\": {\n- \"type\": \"images\",\n- \"options\": {\n- \"images\": {\n- \"src\": \"https://particles.js.org/images/cyan_amongus.png\",\n- \"width\": 500,\n- \"height\": 634\n- }\n- }\n- },\n- \"size\": {\n- \"value\": 40\n- },\n- \"move\": {\n- \"speed\": 10,\n- \"outModes\": {\n- \"default\": \"none\",\n- \"right\": \"destroy\"\n- },\n- \"straight\": true\n- },\n- \"zIndex\": {\n- \"value\": 0\n- },\n- \"rotate\": {\n- \"value\": {\n- \"min\": 0,\n- \"max\": 360\n- },\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 10,\n- \"sync\": true\n- }\n- }\n- }\n- }\n-}\n", "background.json": "@@ -1,121 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- 
},\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 30,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 1,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 100,\n- \"duration\": 2,\n- \"opacity\": 1\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"backgroundMask\": {\n- \"enable\": true,\n- \"cover\": {\n- \"value\": {\n- \"r\": 255,\n- \"g\": 255,\n- \"b\": 255\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"fpsLimit\": 120,\n- \"background\": {\n- \"color\": \"#ffffff\",\n- \"image\": \"url('https://particles.js.org/images/background3.jpg')\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "big.json": "@@ -1,123 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 30,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.8,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 0.4\n- },\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 400,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 300\n- },\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 100,\n- \"size_min\": 300,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 10,\n- \"direction\": \"top\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- 
\"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#ffffff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "blackHole.json": "@@ -1,137 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 1000,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": [\n- \"#ffffff\",\n- \"#77ccff\",\n- \"#ff3333\",\n- \"#ffff33\"\n- ]\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0.5,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- },\n- \"warp\": true\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"absorbers\": {\n- \"color\": \"#ff0000\",\n- \"size\": {\n- \"value\": 10,\n- \"limit\": 50,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 5\n- }\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"absorbers\": {\n- \"orbits\": true,\n- \"destroy\": false,\n- \"size\": {\n- \"value\": 5,\n- \"limit\": 50,\n- \"random\": false,\n- \"density\": 1500\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- }\n- },\n- \"background\": {\n- \"color\": \"#000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "bubble.json": "@@ -1,117 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 6,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#1b1e34\"\n- },\n- \"shape\": {\n- \"type\": \"polygon\",\n- \"polygon\": {\n- \"nb_sides\": 6\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 0.3\n- },\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 160,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 100\n- },\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 5,\n- \"size_min\": 40,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 200,\n- \"color\": \"#ffffff\",\n- \"opacity\": 1,\n- \"width\": 2\n- },\n- \"move\": {\n- \"enable\": true,\n- 
\"speed\": 8,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\"\n- },\n- \"onclick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"size\": 40,\n- \"opacity\": 0.8,\n- \"color\": \"#ff0000\",\n- \"mix\": true\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#efefef\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "chars.json": "@@ -1,155 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\"\n- },\n- \"stroke\": {\n- \"width\": 1,\n- \"color\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"char\",\n- \"character\": [\n- {\n- \"value\": [\n- \"t\",\n- \"s\",\n- \"P\",\n- \"a\",\n- \"r\",\n- \"t\",\n- \"i\",\n- \"c\",\n- \"l\",\n- \"e\",\n- \"s\"\n- ],\n- \"font\": \"Verdana\",\n- \"style\": \"\",\n- \"weight\": \"400\",\n- \"fill\": true\n- },\n- {\n- \"value\": [\n- \"t\",\n- \"s\",\n- \"P\",\n- \"a\",\n- \"r\",\n- \"t\",\n- \"i\",\n- \"c\",\n- \"l\",\n- \"e\",\n- \"s\"\n- ],\n- \"font\": \"Verdana\",\n- \"style\": \"\",\n- \"weight\": \"400\",\n- \"fill\": false\n- }\n- ]\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 16,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 10,\n- \"size_min\": 10,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "clickPause.json": "@@ -1,106 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- 
\"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"pause\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "collisionsAbsorb.json": "@@ -1,118 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- },\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"lineLinked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"collisions\": {\n- \"enable\": true,\n- \"mode\": \"absorb\"\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- 
\"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "collisionsBounce.json": "@@ -1,115 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- },\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"lineLinked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"bounce\": true,\n- \"enable\": true,\n- \"speed\": 10,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "collisionsDestroy.json": "@@ -1,150 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": [\n- \"#3998D0\",\n- \"#2EB6AF\",\n- \"#A9BD33\",\n- \"#FEC73B\",\n- \"#F89930\",\n- \"#F45623\",\n- \"#D62E32\",\n- \"#EB586E\",\n- \"#9952CF\"\n- ]\n- },\n- \"destroy\": {\n- \"mode\": \"split\",\n- \"split\": {\n- \"count\": 1,\n- \"factor\": {\n- \"value\": {\n- \"min\": 4,\n- \"max\": 9\n- }\n- },\n- \"particles\": {\n- \"collisions\": {\n- \"enable\": false\n- },\n- \"destroy\": {\n- \"mode\": \"none\"\n- },\n- \"life\": {\n- \"count\": 1,\n- \"duration\": {\n- \"value\": 1\n- }\n- }\n- }\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- },\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"lineLinked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- 
\"collisions\": {\n- \"enable\": true,\n- \"mode\": \"destroy\"\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 3,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 1\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "confetti.json": "@@ -1,138 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 1\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 0\n- },\n- \"color\": {\n- \"value\": [\n- \"#ffffff\",\n- \"#ff0000\"\n- ]\n- },\n- \"shape\": {\n- \"type\": [\n- \"square\",\n- \"circle\"\n- ]\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 0,\n- \"speed\": 2,\n- \"startValue\": \"max\",\n- \"destroy\": \"min\"\n- }\n- },\n- \"size\": {\n- \"value\": 7,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 3\n- }\n- },\n- \"links\": {\n- \"enable\": false\n- },\n- \"life\": {\n- \"duration\": {\n- \"sync\": true,\n- \"value\": 5\n- },\n- \"count\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"gravity\": {\n- \"enable\": true\n- },\n- \"speed\": 100,\n- \"decay\": 0.1,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"destroy\",\n- \"top\": \"none\"\n- }\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"direction\": \"random\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 60\n- }\n- },\n- \"roll\": {\n- \"darken\": {\n- \"enable\": true,\n- \"value\": 25\n- },\n- \"enable\": true,\n- \"speed\": {\n- \"min\": 15,\n- \"max\": 25\n- }\n- },\n- \"tilt\": {\n- \"direction\": \"random\",\n- \"enable\": true,\n- \"value\": 0,\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 60\n- }\n- },\n- \"wobble\": {\n- \"distance\": 30,\n- \"enable\": true,\n- \"speed\": {\n- \"min\": -15,\n- \"max\": 15\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"emitters\": [\n- {\n- \"direction\": \"top-left\",\n- \"rate\": {\n- \"delay\": 0.1,\n- \"quantity\": 5\n- },\n- \"position\": {\n- \"x\": 100,\n- \"y\": 50\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- }\n- },\n- {\n- \"direction\": \"top-right\",\n- \"rate\": {\n- \"delay\": 0.1,\n- \"quantity\": 5\n- },\n- \"position\": {\n- \"x\": 0,\n- \"y\": 50\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- }\n- }\n- ]\n-}\n", "connect.json": "@@ -1,147 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- 
\"enable\": true,\n- \"mode\": \"connect\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"random\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#ffffff\",\n- \"consent\": false,\n- \"distance\": 150,\n- \"enable\": false,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"random\": false,\n- \"speed\": 6,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"limit\": 500,\n- \"value\": 300\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.5\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"stroke\": {\n- \"color\": \"#fff\",\n- \"width\": 1\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": true,\n- \"value\": 10\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- \"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "customPreset.json": "@@ -1,4 +0,0 @@\n-{\n- \"fpsLimit\": 120,\n- \"preset\": \"fire\"\n-}\n", "customShape.json": "@@ -1,50 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"lineLinked\": {\n- \"enable\": false\n- },\n- \"stroke\": {\n- \"color\": {\n- \"value\": \"random\"\n- },\n- \"width\": 1\n- },\n- \"shape\": {\n- \"type\": \"spiral\",\n- \"custom\": {\n- \"spiral\": {\n- \"innerRadius\": 1,\n- \"lineSpacing\": 1,\n- \"fill\": false,\n- \"close\": false\n- }\n- }\n- },\n- \"size\": {\n- \"value\": 20\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"random\": true,\n- \"direction\": \"clockwise\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 50,\n- \"sync\": false\n- }\n- }\n- },\n- \"preset\": \"links\",\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "dataImages.json": "@@ -1,178 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": true,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- 
\"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#000\",\n- \"consent\": false,\n- \"distance\": 150,\n- \"enable\": false,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"random\": false,\n- \"speed\": 2,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"limit\": 0,\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.5\n- },\n- \"rotate\": {\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"sync\": false\n- },\n- \"direction\": \"random\",\n- \"random\": true,\n- \"value\": 0\n- },\n- \"shape\": {\n- \"character\": {\n- \"fill\": false,\n- \"font\": \"Verdana\",\n- \"style\": \"\",\n- \"value\": \"*\",\n- \"weight\": \"400\"\n- },\n- \"image\": [\n- {\n- \"src\": \"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAACXBIWXMAAAsTAAALEwEAmpwYAAACTElEQVRYhe2WzUtUYRTGf+eORZq1aGlESjEzjn2QY5KVYRiEJVngtk2LWljLahdGEeW2rf9AkS6EKKQSFxLoWOEHt1tYluHCFiONTJMz854WGQjpnXdmIArmgbs595zneTjn3ve8UEIJ/zuCd4/vD/a0Nhda7xQl3tPaLOIMi6GpUA6xSep+2rLbmEAHmL0gWwWJK7x4OOFEVMwG7/rza4UaKLOzufEbmu5CpAZAUYALR2qyoyOzzpNCxcFyBN0nBxeMBk4Di6vj2ytpdESsuliUAYDb7c9cRduBL79j8RTvjar+FQMAt04NjTjZzSFUO1Xk/NCMDBQjDrbfwApi0aqK8jsfNtW9nO4DCN07sWd1/6eb6rZ9X46nGsbnk7acVvNz68NRHL0P2ggEAE9EbnR07qgXSPU/+uyKchMIAVmQUYxcqX31drxoA140eNQIQ6zRrb7wlscBJXPWS3SsUZoRpCUc80b8+HOOwAi96+Wd8xKH/bgV7QVq/fh9O+AeCleTMR9zmfSDZgPVkdfup/Xe+/8F2czOYsQBKPPn8DWQTpVPAOki5Jd/pNMTBRvYNzkZBwYLlhcGD7yZXfRLyXkQKTwoXF9y1uY0kNSlPoG5/MWZWzKJ/qINNIzPJw3kvW6NcNXmRLTeZG5DcBg4Zpctw7Uxr8Um03oZKYEuIPFH+NezGgnFuWzLa20gEnOnBGkDvq6EjCoXES4BBgBhQZC2SMydsuXN+zIRi1ZVVEjlmYAwExp7NwbgHQwezCq7kro0kM8mLKGEfwI/AbDUxKelB9HiAAAAAElFTkSuQmCC\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": 
\"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIiB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4PSIwcHgiIHk9IjBweCIgdmlld0JveD0iMCAwIDEwMCAxMDAiIGVuYWJsZS1iYWNrZ3JvdW5kPSJuZXcgMCAwIDEwMCAxMDAiIHhtbDpzcGFjZT0icHJlc2VydmUiIGhlaWdodD0iMTAwcHgiIHdpZHRoPSIxMDBweCI+CjxnPgoJPHBhdGggZD0iTTI4LjEsMzYuNmM0LjYsMS45LDEyLjIsMS42LDIwLjksMS4xYzguOS0wLjQsMTktMC45LDI4LjksMC45YzYuMywxLjIsMTEuOSwzLjEsMTYuOCw2Yy0xLjUtMTIuMi03LjktMjMuNy0xOC42LTMxLjMgICBjLTQuOS0wLjItOS45LDAuMy0xNC44LDEuNEM0Ny44LDE3LjksMzYuMiwyNS42LDI4LjEsMzYuNnoiLz4KCTxwYXRoIGQ9Ik03MC4zLDkuOEM1Ny41LDMuNCw0Mi44LDMuNiwzMC41LDkuNWMtMyw2LTguNCwxOS42LTUuMywyNC45YzguNi0xMS43LDIwLjktMTkuOCwzNS4yLTIzLjFDNjMuNywxMC41LDY3LDEwLDcwLjMsOS44eiIvPgoJPHBhdGggZD0iTTE2LjUsNTEuM2MwLjYtMS43LDEuMi0zLjQsMi01LjFjLTMuOC0zLjQtNy41LTctMTEtMTAuOGMtMi4xLDYuMS0yLjgsMTIuNS0yLjMsMTguN0M5LjYsNTEuMSwxMy40LDUwLjIsMTYuNSw1MS4zeiIvPgoJPHBhdGggZD0iTTksMzEuNmMzLjUsMy45LDcuMiw3LjYsMTEuMSwxMS4xYzAuOC0xLjYsMS43LTMuMSwyLjYtNC42YzAuMS0wLjIsMC4zLTAuNCwwLjQtMC42Yy0yLjktMy4zLTMuMS05LjItMC42LTE3LjYgICBjMC44LTIuNywxLjgtNS4zLDIuNy03LjRjLTUuMiwzLjQtOS44LDgtMTMuMywxMy43QzEwLjgsMjcuOSw5LjgsMjkuNyw5LDMxLjZ6Ii8+Cgk8cGF0aCBkPSJNMTUuNCw1NC43Yy0yLjYtMS02LjEsMC43LTkuNywzLjRjMS4yLDYuNiwzLjksMTMsOCwxOC41QzEzLDY5LjMsMTMuNSw2MS44LDE1LjQsNTQuN3oiLz4KCTxwYXRoIGQ9Ik0zOS44LDU3LjZDNTQuMyw2Ni43LDcwLDczLDg2LjUsNzYuNGMwLjYtMC44LDEuMS0xLjYsMS43LTIuNWM0LjgtNy43LDctMTYuMyw2LjgtMjQuOGMtMTMuOC05LjMtMzEuMy04LjQtNDUuOC03LjcgICBjLTkuNSwwLjUtMTcuOCwwLjktMjMuMi0xLjdjLTAuMSwwLjEtMC4yLDAuMy0wLjMsMC40Yy0xLDEuNy0yLDMuNC0yLjksNS4xQzI4LjIsNDkuNywzMy44LDUzLjksMzkuOCw1Ny42eiIvPgoJPHBhdGggZD0iTTI2LjIsODguMmMzLjMsMiw2LjcsMy42LDEwLjIsNC43Yy0zLjUtNi4yLTYuMy0xMi42LTguOC0xOC41Yy0zLjEtNy4yLTUuOC0xMy41LTktMTcuMmMtMS45LDgtMiwxNi40LTAuMywyNC43ICAgQzIwLjYsODQuMiwyMy4yLDg2LjMsMjYuMiw4OC4yeiIvPgoJPHBhdGggZD0iTTMwLjksNzNjMi45LDYuOCw2LjEsMTQuNCwxMC41LDIxLjJjMTUuNiwzLDMyLTIuMyw0Mi42LTE0LjZDNjcuNyw3Niw1Mi4yLDY5LjYsMzcuOSw2MC43QzMyLDU3LDI2LjUsNTMsMjEuMyw0OC42ICAgYy0wLjYsMS41LTEuMiwzLTEuNyw0LjZDMjQuMSw1Ny4xLDI3LjMsNjQuNSwzMC45LDczeiIvPgo8L2c+Cjwvc3ZnPg==\",\n- \"width\": 32,\n- \"height\": 32\n- }\n- ],\n- \"polygon\": {\n- \"sides\": 5\n- },\n- \"stroke\": {\n- \"color\": \"#000000\",\n- \"width\": 0\n- },\n- \"type\": \"image\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 16\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- \"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"background\": {\n- \"color\": \"#fff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "default.json": "@@ -1,111 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 
20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "destroy.json": "@@ -1,111 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"destroy\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "disappearing.json": "@@ -1,97 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"background\": {\n- \"color\": {\n- \"value\": \"#000\"\n- }\n- },\n- \"fpsLimit\": 120,\n- \"emitters\": {\n- \"direction\": \"random\",\n- \"size\": {\n- \"width\": 100,\n- \"height\": 100\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"rate\": {\n- \"delay\": 0.1,\n- \"quantity\": 10\n- }\n- },\n- \"particles\": {\n- \"number\": {\n- 
\"value\": 0\n- },\n- \"color\": {\n- \"value\": \"random\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.8,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 0.3\n- }\n- },\n- \"size\": {\n- \"value\": 50,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 30,\n- \"size_min\": 2,\n- \"sync\": true,\n- \"startValue\": \"max\",\n- \"destroy\": \"min\"\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"destroy\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"repulse\": {\n- \"distance\": 100\n- },\n- \"push\": {\n- \"quantity\": 4\n- }\n- }\n- },\n- \"detectRetina\": true\n-}\n", "divEvents.json": "@@ -1,148 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fps_limit\": 60,\n- \"background\": {\n- \"color\": \"#0d47a1\"\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"ondiv\": [\n- {\n- \"enable\": true,\n- \"selectors\": \".bubble.circle\",\n- \"mode\": \"bubble\",\n- \"type\": \"circle\"\n- },\n- {\n- \"enable\": true,\n- \"selectors\": \".repulse.circle\",\n- \"mode\": \"repulse\",\n- \"type\": \"circle\"\n- },\n- {\n- \"enable\": true,\n- \"selectors\": \".bubble.rectangle\",\n- \"mode\": \"bubble\",\n- \"type\": \"rectangle\"\n- },\n- {\n- \"enable\": true,\n- \"selectors\": \".repulse.rectangle\",\n- \"mode\": \"repulse\",\n- \"type\": \"rectangle\"\n- },\n- {\n- \"enable\": true,\n- \"selectors\": \".bounce.circle\",\n- \"mode\": \"bounce\",\n- \"type\": \"circle\"\n- },\n- {\n- \"enable\": true,\n- \"selectors\": \".bounce.rectangle\",\n- \"mode\": \"bounce\",\n- \"type\": \"rectangle\"\n- }\n- ],\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 6,\n- \"color\": \"#000000\"\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"line_linked\": {\n- \"color\": \"#ffffff\",\n- \"distance\": 150,\n- \"enable\": true,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"out_mode\": \"out\",\n- \"random\": false,\n- \"speed\": 2,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- },\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"anim\": {\n- \"enable\": false,\n- \"opacity_min\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.5\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"anim\": {\n- \"enable\": false,\n- \"size_min\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": true,\n- \"value\": 5\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- 
\"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"retina_detect\": true\n-}\n", "emitter.json": "@@ -1,328 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 100,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#000\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#000\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"emitter\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"emitters\": {\n- \"life\": {\n- \"count\": 10,\n- \"delay\": 0.5,\n- \"duration\": 3\n- },\n- \"particles\": {\n- \"shape\": {\n- \"type\": \"star\",\n- \"polygon\": {\n- \"sides\": 7\n- }\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"random\": true,\n- \"direction\": \"clockwise\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 15,\n- \"sync\": false\n- }\n- },\n- \"color\": {\n- \"value\": \"#f0f\"\n- },\n- \"lineLinked\": {\n- \"enable\": false\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": false\n- },\n- \"move\": {\n- \"speed\": 20,\n- \"random\": false,\n- \"outMode\": \"destroy\"\n- }\n- }\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#fff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- },\n- \"emitters\": [\n- {\n- \"direction\": \"top\",\n- \"position\": {\n- \"x\": 50,\n- \"y\": 105\n- },\n- \"rate\": {\n- \"delay\": 0.1\n- },\n- \"size\": {\n- \"width\": 100,\n- \"height\": 0\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- },\n- \"lineLinked\": {\n- \"enable\": false\n- },\n- \"size\": {\n- \"value\": 400,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 200\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"move\": {\n- \"speed\": 10,\n- \"random\": false,\n- \"outMode\": \"destroy\"\n- }\n- }\n- },\n- {\n- \"direction\": \"top-right\",\n- \"position\": {\n- \"x\": 0,\n- \"y\": 100\n- },\n- \"particles\": {\n- \"shape\": {\n- \"type\": \"star\"\n- },\n- \"color\": {\n- \"value\": \"#f00\"\n- },\n- 
\"lineLinked\": {\n- \"enable\": true,\n- \"id\": \"emitter1\",\n- \"color\": {\n- \"value\": \"#ff7700\"\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.3\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"random\": true,\n- \"direction\": \"counter-clockwise\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 15,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": {\n- \"enable\": true\n- }\n- },\n- \"move\": {\n- \"speed\": 10,\n- \"random\": false,\n- \"outMode\": \"destroy\"\n- }\n- }\n- },\n- {\n- \"direction\": \"top-left\",\n- \"position\": {\n- \"x\": 100,\n- \"y\": 100\n- },\n- \"particles\": {\n- \"shape\": {\n- \"type\": \"square\"\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"random\": true,\n- \"direction\": \"clockwise\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 15,\n- \"sync\": false\n- }\n- },\n- \"color\": {\n- \"value\": \"#00f\"\n- },\n- \"lineLinked\": {\n- \"enable\": false\n- },\n- \"opacity\": {\n- \"value\": 0.8\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": false\n- },\n- \"move\": {\n- \"speed\": 20,\n- \"random\": false,\n- \"outMode\": \"destroy\"\n- }\n- }\n- },\n- {\n- \"life\": {\n- \"count\": 10,\n- \"delay\": 0.5,\n- \"duration\": 3\n- },\n- \"particles\": {\n- \"shape\": {\n- \"type\": \"polygon\",\n- \"polygon\": {\n- \"sides\": 6\n- }\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"random\": true,\n- \"direction\": \"clockwise\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 15,\n- \"sync\": false\n- }\n- },\n- \"color\": {\n- \"value\": \"#0f0\"\n- },\n- \"lineLinked\": {\n- \"enable\": false\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": false\n- },\n- \"move\": {\n- \"speed\": 20,\n- \"random\": false,\n- \"outMode\": \"destroy\"\n- }\n- }\n- }\n- ]\n-}\n", "emitterAbsorber.json": "@@ -1,171 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 0,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#000\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#000\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": 
true,\n- \"background\": {\n- \"color\": \"#fff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- },\n- \"absorbers\": {\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"size\": {\n- \"density\": 20,\n- \"value\": 50,\n- \"limit\": 100,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 30\n- }\n- }\n- },\n- \"emitters\": [\n- {\n- \"direction\": \"top-right\",\n- \"position\": {\n- \"x\": 0,\n- \"y\": 100\n- },\n- \"particles\": {\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"color\": {\n- \"value\": \"random\"\n- },\n- \"lineLinked\": {\n- \"enable\": false\n- },\n- \"opacity\": {\n- \"value\": 0.3\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"random\": true,\n- \"direction\": \"counter-clockwise\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 15,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 5\n- }\n- },\n- \"move\": {\n- \"speed\": 5,\n- \"random\": false,\n- \"outMode\": \"bounce\"\n- }\n- }\n- }\n- ]\n-}\n", "emitterAngled.json": "@@ -1,59 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 0\n- },\n- \"color\": {\n- \"value\": \"random\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.3\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 5\n- }\n- },\n- \"move\": {\n- \"angle\": {\n- \"offset\": 0,\n- \"value\": 30\n- },\n- \"enable\": true,\n- \"speed\": 15,\n- \"direction\": \"top\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"destroy\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#fff\"\n- },\n- \"emitters\": [\n- {\n- \"direction\": \"top\",\n- \"position\": {\n- \"y\": 100\n- },\n- \"life\": {\n- \"duration\": 3,\n- \"delay\": 5,\n- \"count\": 0\n- }\n- }\n- ]\n-}\n", "emitterShapes.json": "@@ -1,100 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 0\n- },\n- \"color\": {\n- \"value\": \"#000000\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"size\": {\n- \"value\": 1\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#fff\"\n- },\n- \"emitters\": [\n- {\n- \"shape\": \"square\",\n- \"position\": {\n- \"x\": 33,\n- \"y\": 33\n- },\n- \"size\": {\n- \"width\": 200,\n- \"height\": 200,\n- \"mode\": \"precise\"\n- },\n- \"life\": {\n- \"duration\": 10,\n- \"delay\": 0.5,\n- \"count\": 1\n- }\n- },\n- {\n- \"shape\": \"circle\",\n- \"position\": {\n- \"x\": 67,\n- \"y\": 33\n- },\n- \"size\": {\n- \"width\": 200,\n- \"height\": 200,\n- \"mode\": \"precise\"\n- },\n- \"life\": {\n- \"duration\": 10,\n- \"delay\": 0.5,\n- \"count\": 1\n- }\n- },\n- {\n- \"fill\": false,\n- \"shape\": \"square\",\n- \"position\": {\n- \"x\": 33,\n- \"y\": 67\n- },\n- \"size\": {\n- \"width\": 200,\n- \"height\": 200,\n- \"mode\": \"precise\"\n- },\n- \"life\": {\n- \"duration\": 10,\n- \"delay\": 0.5,\n- \"count\": 1\n- }\n- },\n- {\n- \"fill\": false,\n- \"shape\": \"circle\",\n- \"position\": {\n- \"x\": 67,\n- \"y\": 67\n- },\n- \"size\": {\n- \"width\": 200,\n- \"height\": 200,\n- \"mode\": \"precise\"\n- },\n- \"life\": {\n- \"duration\": 10,\n- \"delay\": 0.5,\n- \"count\": 1\n- }\n- }\n- ]\n-}\n", 
"fireworks.json": "@@ -1,159 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"fpsLimit\": 120,\n- \"emitters\": {\n- \"direction\": \"top\",\n- \"life\": {\n- \"count\": 0,\n- \"duration\": 0.1,\n- \"delay\": 0.1\n- },\n- \"rate\": {\n- \"delay\": 0.5,\n- \"quantity\": 1\n- },\n- \"size\": {\n- \"width\": 100,\n- \"height\": 0\n- },\n- \"position\": {\n- \"y\": 100,\n- \"x\": 50\n- }\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 0\n- },\n- \"destroy\": {\n- \"mode\": \"split\",\n- \"split\": {\n- \"count\": 1,\n- \"factor\": {\n- \"value\": 0.333333\n- },\n- \"rate\": {\n- \"value\": 100\n- },\n- \"particles\": {\n- \"stroke\": {\n- \"color\": {\n- \"value\": [\n- \"#ff595e\",\n- \"#ffca3a\",\n- \"#8ac926\",\n- \"#1982c4\",\n- \"#6a4c93\"\n- ]\n- },\n- \"width\": 1\n- },\n- \"number\": {\n- \"value\": 0\n- },\n- \"collisions\": {\n- \"enable\": false\n- },\n- \"opacity\": {\n- \"value\": {\n- \"min\": 0.1,\n- \"max\": 1\n- },\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 0.7,\n- \"sync\": false,\n- \"startValue\": \"max\",\n- \"destroy\": \"min\"\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"value\": 1,\n- \"animation\": {\n- \"enable\": false\n- }\n- },\n- \"life\": {\n- \"count\": 1,\n- \"duration\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 2\n- }\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"gravity\": {\n- \"enable\": false\n- },\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": true,\n- \"straight\": false,\n- \"outMode\": \"destroy\"\n- }\n- }\n- }\n- },\n- \"life\": {\n- \"count\": 1\n- },\n- \"shape\": {\n- \"type\": \"line\"\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 0.1,\n- \"max\": 50\n- },\n- \"animation\": {\n- \"enable\": true,\n- \"sync\": true,\n- \"speed\": 90,\n- \"startValue\": \"max\",\n- \"destroy\": \"min\"\n- }\n- },\n- \"stroke\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"width\": 1\n- },\n- \"rotate\": {\n- \"path\": true\n- },\n- \"move\": {\n- \"enable\": true,\n- \"gravity\": {\n- \"acceleration\": 15,\n- \"enable\": true,\n- \"inverse\": true,\n- \"maxSpeed\": 100\n- },\n- \"speed\": {\n- \"min\": 10,\n- \"max\": 20\n- },\n- \"outModes\": {\n- \"default\": \"destroy\",\n- \"top\": \"none\"\n- },\n- \"trail\": {\n- \"fillColor\": \"#000\",\n- \"enable\": true,\n- \"length\": 10\n- }\n- }\n- }\n-}\n", "fireworks_2.json": "@@ -1,151 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"fpsLimit\": 120,\n- \"emitters\": {\n- \"direction\": \"top\",\n- \"life\": {\n- \"count\": 0,\n- \"duration\": 0.1,\n- \"delay\": 0.1\n- },\n- \"rate\": {\n- \"delay\": 0.15,\n- \"quantity\": 1\n- },\n- \"size\": {\n- \"width\": 100,\n- \"height\": 0\n- },\n- \"position\": {\n- \"y\": 100,\n- \"x\": 50\n- }\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 0\n- },\n- \"destroy\": {\n- \"mode\": \"split\",\n- \"split\": {\n- \"count\": 1,\n- \"factor\": {\n- \"value\": 0.333333\n- },\n- \"rate\": {\n- \"value\": 100\n- },\n- \"sizeOffset\": false,\n- \"particles\": {\n- \"stroke\": {\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- },\n- \"width\": 1\n- },\n- \"number\": {\n- \"value\": 0\n- },\n- \"collisions\": {\n- \"enable\": false\n- },\n- \"opacity\": {\n- \"value\": 0.8,\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 1,\n- 
\"minimumValue\": 0.1,\n- \"sync\": true,\n- \"startValue\": \"max\",\n- \"destroy\": \"min\"\n- }\n- },\n- \"size\": {\n- \"value\": 75,\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 150,\n- \"minimumValue\": 1,\n- \"destroy\": \"max\",\n- \"startValue\": \"min\",\n- \"sync\": true\n- }\n- },\n- \"life\": {\n- \"count\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"gravity\": {\n- \"enable\": false\n- },\n- \"speed\": 10,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outMode\": \"destroy\"\n- }\n- }\n- }\n- },\n- \"life\": {\n- \"count\": 1,\n- \"duration\": {\n- \"value\": 1.25\n- }\n- },\n- \"shape\": {\n- \"type\": \"line\"\n- },\n- \"size\": {\n- \"value\": 50,\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 1,\n- \"speed\": 150,\n- \"startValue\": \"max\",\n- \"count\": 1\n- }\n- },\n- \"stroke\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"width\": 1\n- },\n- \"rotate\": {\n- \"path\": true\n- },\n- \"move\": {\n- \"enable\": true,\n- \"gravity\": {\n- \"acceleration\": 15,\n- \"enable\": false,\n- \"maxSpeed\": 50,\n- \"inverse\": true\n- },\n- \"speed\": 15,\n- \"outModes\": {\n- \"default\": \"destroy\",\n- \"top\": \"none\"\n- },\n- \"trail\": {\n- \"fillColor\": \"#000\",\n- \"enable\": true,\n- \"length\": 4\n- }\n- }\n- }\n-}\n", "fontawesome.json": "@@ -1,203 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"detectRetina\": true,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"slow\": {\n- \"active\": false,\n- \"factor\": 1,\n- \"radius\": 0\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#ffffff\",\n- \"consent\": false,\n- \"distance\": 150,\n- \"enable\": true,\n- \"opacity\": 0.4,\n- \"shadow\": {\n- \"blur\": 5,\n- \"color\": \"lime\",\n- \"enable\": false\n- },\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"collisions\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"random\": false,\n- \"speed\": 2,\n- \"straight\": false,\n- \"trail\": {\n- \"enable\": false,\n- \"length\": 10,\n- \"fillColor\": \"#000000\"\n- }\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"limit\": 0,\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.5\n- },\n- \"rotate\": {\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 0,\n- \"sync\": false\n- },\n- \"direction\": 
\"clockwise\",\n- \"random\": false,\n- \"value\": 0\n- },\n- \"shape\": {\n- \"character\": [\n- {\n- \"fill\": true,\n- \"font\": \"Font Awesome 5 Brands\",\n- \"style\": \"\",\n- \"value\": [\n- \"\\uf179\"\n- ],\n- \"weight\": \"400\"\n- },\n- {\n- \"fill\": true,\n- \"font\": \"Font Awesome 5 Free\",\n- \"style\": \"\",\n- \"value\": [\n- \"\\uf5d1\"\n- ],\n- \"weight\": \"900\"\n- }\n- ],\n- \"type\": \"char\"\n- },\n- \"stroke\": {\n- \"color\": \"#ffffff\",\n- \"width\": 1\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 10,\n- \"speed\": 10,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 16\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"stroke\": {\n- \"color\": \"#ffffff\",\n- \"width\": 0.5\n- }\n- },\n- \"enable\": false,\n- \"inline\": {\n- \"arrangement\": \"one-per-point\"\n- },\n- \"move\": {\n- \"radius\": 10,\n- \"type\": \"path\"\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"backgroundMask\": {\n- \"enable\": false\n- },\n- \"pauseOnBlur\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "forward.json": "@@ -1,115 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"rotate\": {\n- \"path\": true\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"stroke\": {\n- \"width\": 0,\n- \"color\": \"#000000\"\n- },\n- \"shape\": {\n- \"type\": \"image\",\n- \"options\": {\n- \"image\": {\n- \"src\": \"images/arrow.svg\",\n- \"width\": 512,\n- \"height\": 512,\n- \"replaceColor\": true\n- }\n- }\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 32,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#ffffff\"\n- }\n-}\n", "grabRandomColor.json": "@@ -1,112 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": 
{\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"grab\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"color\": \"random\",\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "gradients.json": "@@ -1,112 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 100\n- },\n- \"gradient\": [\n- {\n- \"type\": \"radial\",\n- \"colors\": [\n- {\n- \"stop\": 0.25,\n- \"value\": \"#5bc0eb\"\n- },\n- {\n- \"stop\": 1,\n- \"value\": \"#000000\",\n- \"opacity\": 0\n- }\n- ]\n- },\n- {\n- \"type\": \"radial\",\n- \"colors\": [\n- {\n- \"stop\": 0.25,\n- \"value\": \"#fde74c\"\n- },\n- {\n- \"stop\": 1,\n- \"value\": \"#000000\",\n- \"opacity\": 0\n- }\n- ]\n- },\n- {\n- \"type\": \"radial\",\n- \"colors\": [\n- {\n- \"stop\": 0.25,\n- \"value\": \"#9bc53d\"\n- },\n- {\n- \"stop\": 1,\n- \"value\": \"#000000\",\n- \"opacity\": 0\n- }\n- ]\n- },\n- {\n- \"type\": \"radial\",\n- \"colors\": [\n- {\n- \"stop\": 0.25,\n- \"value\": \"#e55934\"\n- },\n- {\n- \"stop\": 1,\n- \"value\": \"#000000\",\n- \"opacity\": 0\n- }\n- ]\n- },\n- {\n- \"type\": \"radial\",\n- \"colors\": [\n- {\n- \"stop\": 0.25,\n- \"value\": \"#fa7921\"\n- },\n- {\n- \"stop\": 1,\n- \"value\": \"#000000\",\n- \"opacity\": 0\n- }\n- ]\n- }\n- ],\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 15,\n- \"max\": 20\n- },\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"sync\": false\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"direction\": \"none\",\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"background\": {\n- \"color\": \"#000\"\n- }\n-}\n", "gravity.json": "@@ -1,193 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"destroy\": {\n- \"mode\": \"split\",\n- \"split\": {\n- \"count\": 1,\n- \"factor\": {\n- \"value\": {\n- \"min\": 4,\n- \"max\": 9\n- }\n- },\n- \"particles\": {\n- \"collisions\": {\n- \"enable\": false\n- },\n- \"destroy\": {\n- \"mode\": \"none\"\n- },\n- \"life\": {\n- \"count\": 1\n- }\n- }\n- }\n- 
},\n- \"number\": {\n- \"value\": 0\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- },\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": false\n- },\n- \"life\": {\n- \"duration\": {\n- \"sync\": true,\n- \"value\": 5\n- },\n- \"count\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"gravity\": {\n- \"enable\": true\n- },\n- \"speed\": 10,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"bottom\": \"split\",\n- \"left\": \"destroy\",\n- \"right\": \"destroy\",\n- \"top\": \"none\"\n- },\n- \"trail\": {\n- \"enable\": true,\n- \"fillColor\": \"#000000\",\n- \"length\": 10\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"emitters\": {\n- \"direction\": \"top\",\n- \"life\": {\n- \"count\": 0,\n- \"duration\": 5,\n- \"delay\": 2\n- },\n- \"rate\": {\n- \"delay\": 0.1,\n- \"quantity\": 1\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"particles\": {\n- \"bounce\": {\n- \"vertical\": {\n- \"value\": 0.8,\n- \"random\": {\n- \"enable\": true,\n- \"minimValue\": 0.4\n- }\n- }\n- },\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- },\n- \"links\": {\n- \"enable\": false\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 5\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"move\": {\n- \"speed\": 10,\n- \"random\": false\n- }\n- }\n- }\n-}\n", "growing.json": "@@ -1,123 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"emitters\": {\n- \"direction\": \"top\",\n- \"size\": {\n- \"width\": 100,\n- \"height\": 0\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 100\n- },\n- \"rate\": {\n- \"delay\": 0.1,\n- \"quantity\": 2\n- }\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 0,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"random\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 20,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"size_min\": 0.1,\n- \"sync\": true,\n- \"startValue\": \"min\",\n- \"destroy\": \"max\"\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- 
\"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"destroy\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "hexagonPath.json": "@@ -1,87 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#FF0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 10\n- }\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": true,\n- \"rotate\": {\n- \"distance\": 100,\n- \"x\": 2000,\n- \"y\": 2000\n- }\n- },\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outModes\": {\n- \"default\": \"destroy\"\n- },\n- \"path\": {\n- \"clamp\": false,\n- \"enable\": true,\n- \"delay\": {\n- \"value\": 0\n- },\n- \"generator\": \"polygonPathGenerator\",\n- \"options\": {\n- \"sides\": 6,\n- \"turnSteps\": 30,\n- \"angle\": 30\n- }\n- },\n- \"random\": false,\n- \"speed\": 3,\n- \"straight\": false,\n- \"trail\": {\n- \"fillColor\": \"#000\",\n- \"length\": 20,\n- \"enable\": true\n- }\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"value\": 0\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"value\": 2\n- }\n- },\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"detectRetina\": true,\n- \"emitters\": {\n- \"direction\": \"none\",\n- \"rate\": {\n- \"quantity\": 1,\n- \"delay\": 0.25\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- }\n- }\n-}\n", "hollowknight.json": "@@ -1,151 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": false,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 2,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 40,\n- \"duration\": 2,\n- \"opacity\": 8,\n- \"size\": 6\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"slow\": {\n- \"active\": false,\n- \"radius\": 0,\n- 
\"factor\": 1\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#ffffff\",\n- \"consent\": false,\n- \"distance\": 25,\n- \"enable\": true,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"bounce\",\n- \"random\": false,\n- \"speed\": 1,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": false,\n- \"area\": 2000\n- },\n- \"limit\": 0,\n- \"value\": 400\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 0.05,\n- \"speed\": 2,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.4\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": true,\n- \"value\": 1\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": true,\n- \"lineColor\": \"rgba(255,255,255,0.2)\",\n- \"lineWidth\": 0.5\n- },\n- \"enable\": true,\n- \"move\": {\n- \"radius\": 10\n- },\n- \"inline\": {\n- \"arrangement\": \"equidistant\"\n- },\n- \"scale\": 2,\n- \"type\": \"inline\",\n- \"url\": \"https://particles.js.org/images/hollowknight.svg\"\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "hyperspace.json": "@@ -1,87 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": [\n- \"#3998D0\",\n- \"#2EB6AF\",\n- \"#A9BD33\",\n- \"#FEC73B\",\n- \"#F89930\",\n- \"#F45623\",\n- \"#D62E32\",\n- \"#EB586E\",\n- \"#9952CF\"\n- ]\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 800,\n- \"y\": 800\n- }\n- },\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outModes\": {\n- \"default\": \"destroy\"\n- },\n- \"random\": false,\n- \"speed\": 3,\n- \"straight\": false,\n- \"trail\": {\n- \"fillColor\": \"#000\",\n- \"length\": 30,\n- \"enable\": true\n- }\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"value\": 0\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"value\": 25,\n- \"animation\": {\n- \"startValue\": \"min\",\n- \"enable\": true,\n- \"minimumValue\": 1,\n- \"speed\": 2,\n- \"destroy\": \"max\",\n- \"sync\": true\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"emitters\": {\n- \"direction\": \"none\",\n- \"rate\": {\n- \"quantity\": 5,\n- \"delay\": 0.3\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- }\n- }\n-}\n", "images.json": "@@ -1,249 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- 
\"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#000\",\n- \"consent\": false,\n- \"distance\": 150,\n- \"enable\": false,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"random\": false,\n- \"speed\": 2,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"limit\": 0,\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 1\n- },\n- \"rotate\": {\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"sync\": false\n- },\n- \"direction\": \"random\",\n- \"random\": true,\n- \"value\": 0\n- },\n- \"shape\": {\n- \"character\": {\n- \"fill\": false,\n- \"font\": \"Verdana\",\n- \"style\": \"\",\n- \"value\": \"*\",\n- \"weight\": \"400\"\n- },\n- \"image\": [\n- {\n- \"src\": \"https://particles.js.org/images/fruits/apple.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/avocado.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/banana.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/berries.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/cherry.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/grapes.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/lemon.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/orange.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/peach.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/pear.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/pepper.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/plum.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/star.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/strawberry.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/watermelon.png\",\n- \"width\": 32,\n- \"height\": 32\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/watermelon_slice.png\",\n- \"width\": 32,\n- \"height\": 32\n- }\n- ],\n- \"polygon\": {\n- \"sides\": 5\n- },\n- \"stroke\": {\n- \"color\": \"#000000\",\n- \"width\": 0\n- 
},\n- \"type\": \"image\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 16\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- \"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"background\": {\n- \"color\": \"#fff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "imagesDirections.json": "@@ -1,122 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"push\": {\n- \"quantity\": 4\n- }\n- }\n- },\n- \"particles\": {\n- \"move\": {\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"speed\": 2\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"rotate\": {\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"sync\": false\n- },\n- \"direction\": \"random\",\n- \"random\": true,\n- \"value\": 0\n- },\n- \"shape\": {\n- \"type\": \"image\",\n- \"options\": {\n- \"image\": [\n- {\n- \"src\": \"https://particles.js.org/images/fruits/apple.png\",\n- \"width\": 32,\n- \"height\": 32,\n- \"particles\": {\n- \"move\": {\n- \"direction\": \"top\"\n- }\n- }\n- },\n- {\n- \"src\": \"https://particles.js.org/images/fruits/avocado.png\",\n- \"width\": 32,\n- \"height\": 32,\n- \"particles\": {\n- \"move\": {\n- \"direction\": \"bottom\"\n- }\n- }\n- }\n- ]\n- }\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 16\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- \"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"background\": {\n- \"color\": \"#fff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "index.json": "@@ -1,113 +0,0 @@\n-{\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"fullScreen\": {\n- \"enable\": true\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"mode\": \"trail\",\n- \"enable\": true\n- }\n- },\n- \"modes\": {\n- \"trail\": {\n- \"delay\": 0.005,\n- \"quantity\": 5,\n- \"pauseOnStop\": true,\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 400,\n- \"sync\": true\n- }\n- },\n- \"collisions\": {\n- \"enable\": false\n- },\n- \"links\": {\n- \"enable\": false\n- },\n- \"move\": {\n- \"outModes\": {\n- \"default\": \"destroy\"\n- },\n- \"speed\": 2\n- },\n- \"size\": {\n- \"value\": 5,\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"minimumValue\": 1,\n- \"sync\": true,\n- \"startValue\": 
\"min\",\n- \"destroy\": \"max\"\n- }\n- }\n- }\n- }\n- },\n- \"resize\": true\n- },\n- \"motion\": {\n- \"reduce\": {\n- \"value\": true\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"animation\": {\n- \"enable\": true,\n- \"sync\": false,\n- \"speed\": 50\n- },\n- \"value\": \"#f00\"\n- },\n- \"links\": {\n- \"color\": \"random\",\n- \"enable\": true\n- },\n- \"collisions\": {\n- \"enable\": true\n- },\n- \"move\": {\n- \"enable\": true\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true\n- }\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 0.3,\n- \"speed\": 0.5\n- },\n- \"value\": 0.8,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 0.3\n- }\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 1,\n- \"speed\": 3\n- },\n- \"value\": 3,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 1\n- }\n- }\n- }\n-}\n", "infection.json": "@@ -1,109 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"infection\": {\n- \"enable\": true,\n- \"infections\": 10,\n- \"cure\": true,\n- \"stages\": [\n- {\n- \"color\": \"#ff0000\",\n- \"duration\": 1\n- },\n- {\n- \"color\": \"#ffa500\",\n- \"duration\": 1,\n- \"rate\": 2\n- },\n- {\n- \"color\": \"#ffff00\",\n- \"duration\": 1,\n- \"rate\": 2\n- },\n- {\n- \"color\": \"#008000\",\n- \"duration\": 1,\n- \"rate\": 3\n- },\n- {\n- \"color\": \"#0000ff\",\n- \"duration\": 1,\n- \"rate\": 4\n- },\n- {\n- \"color\": \"#4b0082\",\n- \"duration\": 1,\n- \"rate\": 5\n- },\n- {\n- \"color\": \"#ee82ee\",\n- \"duration\": 1,\n- \"rate\": 6,\n- \"infectedStage\": 0\n- }\n- ]\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 400,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.8,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"collisions\": true,\n- \"enable\": true,\n- \"speed\": 20,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"bounce\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "life.json": "@@ -1,125 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 160,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 
100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- },\n- \"life\": {\n- \"duration\": {\n- \"sync\": false,\n- \"value\": 3\n- },\n- \"count\": 0,\n- \"delay\": {\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 1\n- },\n- \"value\": 2\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "lightHover.json": "@@ -1,80 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 30,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": [\n- \"circle\",\n- \"square\"\n- ]\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"size\": {\n- \"value\": 30,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 15\n- }\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"direction\": \"clockwise\",\n- \"animation\": {\n- \"speed\": 5,\n- \"enable\": true\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"out_mode\": \"out\"\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"light\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"light\": {\n- \"area\": {\n- \"gradient\": {\n- \"start\": \"3b5e98\",\n- \"stop\": \"#17163e\"\n- }\n- },\n- \"shadow\": {\n- \"color\": \"#17163e\"\n- }\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#17163e\"\n- }\n-}\n", "linkTriangles.json": "@@ -1,116 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"random\",\n- \"opacity\": 0.4,\n- \"width\": 1,\n- \"triangles\": {\n- \"enable\": true,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.1\n- }\n- },\n- \"move\": 
{\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "localPolygonMask.json": "@@ -1,142 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": false,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 2,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 40,\n- \"duration\": 2,\n- \"opacity\": 8,\n- \"size\": 6\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"slow\": {\n- \"active\": false,\n- \"radius\": 0,\n- \"factor\": 1\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#ffffff\",\n- \"consent\": false,\n- \"distance\": 30,\n- \"enable\": false,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"bounce\",\n- \"random\": false,\n- \"speed\": 1,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": false,\n- \"area\": 2000\n- },\n- \"limit\": 0,\n- \"value\": 200\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"value\": 3\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": true,\n- \"lineColor\": \"rgba(255,255,255,1)\",\n- \"lineWidth\": 1\n- },\n- \"enable\": true,\n- \"move\": {\n- \"radius\": 10\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"inline\": {\n- \"arrangement\": \"equidistant\"\n- },\n- \"scale\": 3,\n- \"type\": \"inside\",\n- \"data\": \"<svg xmlns=\\\"http://www.w3.org/2000/svg\\\" xmlns:xlink=\\\"http://www.w3.org/1999/xlink\\\" height=\\\"210\\\" width=\\\"400\\\"><path d=\\\"M150 0 L75 200 L225 200 Z\\\" /></svg>\"\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": 
\"cover\"\n- }\n-}\n", "manual.json": "@@ -1,137 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"manualParticles\": [\n- {\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- }\n- },\n- {\n- \"position\": {\n- \"x\": 25,\n- \"y\": 25\n- }\n- },\n- {\n- \"position\": {\n- \"x\": 75,\n- \"y\": 75\n- }\n- },\n- {\n- \"position\": {\n- \"x\": 25,\n- \"y\": 75\n- }\n- },\n- {\n- \"position\": {\n- \"x\": 75,\n- \"y\": 25\n- }\n- }\n- ],\n- \"particles\": {\n- \"number\": {\n- \"value\": 0,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 30\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"top\",\n- \"random\": false,\n- \"straight\": true,\n- \"out_mode\": \"out\",\n- \"warp\": true,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onClick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "motionDisable.json": "@@ -1,114 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"motion\": {\n- \"disable\": true\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": 
{\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "motionReduce.json": "@@ -1,118 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"motion\": {\n- \"disable\": false,\n- \"reduce\": {\n- \"value\": true,\n- \"factor\": 6\n- }\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "mouseAttract.json": "@@ -1,116 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- 
\"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"attract\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"attract\": {\n- \"distance\": 600,\n- \"duration\": 0.4,\n- \"speed\": 3\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "mouseBounce.json": "@@ -1,111 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"bounce\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"bounce\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "mouseFollow.json": "@@ -1,80 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"background\": {\n- \"color\": \"#000000\"\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": [\n- \"bubble\",\n- \"connect\"\n- ]\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 200,\n- \"duration\": 2,\n- \"opacity\": 1,\n- \"size\": 30,\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- }\n- },\n- \"connect\": {\n- \"distance\": 60,\n- \"lineLinked\": {\n- \"opacity\": 0.2\n- },\n- \"radius\": 200\n- }\n- }\n- },\n- 
\"particles\": {\n- \"color\": {\n- \"value\": \"#000000\"\n- },\n- \"move\": {\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"random\": false,\n- \"speed\": 2,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- },\n- \"value\": 300\n- },\n- \"opacity\": {\n- \"value\": 0\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- },\n- \"value\": 15\n- }\n- },\n- \"retina_detect\": true\n-}\n", "mouseTrail.json": "@@ -1,135 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"trail\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- },\n- \"trail\": {\n- \"delay\": 0.01,\n- \"pauseOnStop\": true,\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#00ff00\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 200,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": false\n- },\n- \"move\": {\n- \"outMode\": \"destroy\"\n- },\n- \"size\": {\n- \"random\": true,\n- \"value\": 10\n- }\n- }\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "moveAngle.json": "@@ -1,111 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 
0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": -30,\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "moveDistance.json": "@@ -1,112 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"distance\": 50,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outMode\": \"none\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "multiplePolygonMasks.json": "@@ -1,161 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": false,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": 
false,\n- \"force\": 2,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 40,\n- \"duration\": 2,\n- \"opacity\": 8,\n- \"size\": 6\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"slow\": {\n- \"active\": false,\n- \"radius\": 0,\n- \"factor\": 1\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": [\n- \"#4285f4\",\n- \"#34A853\",\n- \"#FBBC05\",\n- \"#EA4335\"\n- ]\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"random\",\n- \"consent\": false,\n- \"distance\": 40,\n- \"enable\": true,\n- \"opacity\": 1,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"bounce\",\n- \"random\": false,\n- \"speed\": 1,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": false,\n- \"area\": 2000\n- },\n- \"limit\": 0,\n- \"value\": 200\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 0.05,\n- \"speed\": 2,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.4\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": true,\n- \"value\": 1\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"rgba(255,255,255,0.2)\",\n- \"lineWidth\": 0.5\n- },\n- \"enable\": true,\n- \"move\": {\n- \"radius\": 10\n- },\n- \"position\": {\n- \"x\": 30,\n- \"y\": 30\n- },\n- \"inline\": {\n- \"arrangement\": \"equidistant\"\n- },\n- \"scale\": 1,\n- \"type\": \"inline\",\n- \"url\": \"https://particles.js.org/images/google.svg\"\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "nasa.json": "@@ -1,108 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 160,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 1,\n- \"opacity_min\": 0,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 4,\n- \"size_min\": 0.3,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 1,\n- \"direction\": \"none\",\n- \"random\": true,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"bounce\": false,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 600\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- 
\"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 250,\n- \"size\": 0,\n- \"duration\": 2,\n- \"opacity\": 0\n- },\n- \"repulse\": {\n- \"distance\": 400,\n- \"duration\": 0.4\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#232741\",\n- \"image\": \"url('http://upload.wikimedia.org/wikipedia/commons/thumb/e/e5/NASA_logo.svg/1237px-NASA_logo.svg.png')\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"20%\"\n- }\n-}\n", "noconfig.json": "@@ -1,6 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- }\n-}\n", "noisePlanes.json": "@@ -1,94 +0,0 @@\n-{\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40,\n- \"speed\": 3\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"links\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"move\": {\n- \"path\": {\n- \"enable\": true,\n- \"options\": {\n- \"size\": 20,\n- \"draw\": false,\n- \"increment\": 0.004\n- },\n- \"generator\": \"perlinNoise\"\n- },\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outModes\": {\n- \"default\": \"out\"\n- },\n- \"speed\": 6\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"value\": 80\n- },\n- \"rotate\": {\n- \"value\": 45,\n- \"path\": true\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"shape\": {\n- \"image\": {\n- \"height\": 128,\n- \"src\": \"https://particles.js.org/images/plane_alt.png\",\n- \"width\": 128\n- },\n- \"type\": \"image\"\n- },\n- \"size\": {\n- \"value\": 32\n- },\n- \"zIndex\": {\n- \"value\": {\n- \"min\": 0,\n- \"max\": 100\n- },\n- \"opacityRate\": 2,\n- \"sizeRate\": 2,\n- \"velocityRate\": 2\n- }\n- },\n- \"detectRetina\": true\n-}\n", "nyancat.json": "@@ -1,120 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 1,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"image\",\n- \"stroke\": {\n- \"width\": 0,\n- \"color\": \"#000000\"\n- },\n- \"polygon\": {\n- \"nb_sides\": 5\n- },\n- \"image\": {\n- \"src\": \"https://cdn2.scratch.mit.edu/get_image/gallery/780516_170x100.png\",\n- \"width\": 1750,\n- \"height\": 800\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 240,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"right\",\n- \"random\": false,\n- \"straight\": true,\n- \"out_mode\": \"out\",\n- 
\"bounce\": false,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"grab\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 200,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 8\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"url('http://fc06.deviantart.net/fs71/f/2011/187/1/0/nyan_cat_background_by_kento1-d3l6i50.jpg')\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "nyancat2.json": "@@ -1,111 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 100,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"star\",\n- \"options\": {\n- \"sides\": 5\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 4,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"left\",\n- \"random\": false,\n- \"straight\": true,\n- \"out_mode\": \"out\",\n- \"bounce\": false,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"grab\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 200,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 8\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#043564\",\n- \"image\": \"url('http://vincentgarreau.com/particles.js/assets/img/kbLd9vb_new.gif')\",\n- \"position\": \"0 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"60%\"\n- }\n-}\n", "orbit.json": "@@ -1,75 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outModes\": {\n- \"default\": \"out\"\n- },\n- \"speed\": 3\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- 
\"limit\": 300,\n- \"value\": 100\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"orbit\": {\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 10\n- },\n- \"enable\": true,\n- \"opacity\": 1,\n- \"color\": \"#ff7700\",\n- \"rotation\": {\n- \"random\": {\n- \"enable\": true\n- }\n- }\n- },\n- \"shape\": {\n- \"type\": [\n- \"circle\",\n- \"square\"\n- ]\n- },\n- \"size\": {\n- \"value\": 10\n- }\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "parallax.json": "@@ -1,111 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 100,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"grab\",\n- \"parallax\": {\n- \"enable\": true,\n- \"smooth\": 10,\n- \"force\": 60\n- }\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "pathPolygonMask.json": "@@ -1,162 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": false,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 2,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 40,\n- \"duration\": 2,\n- \"opacity\": 8,\n- \"size\": 6\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"slow\": {\n- \"active\": false,\n- \"radius\": 0,\n- \"factor\": 1\n- 
}\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#ffffff\",\n- \"consent\": false,\n- \"distance\": 30,\n- \"enable\": true,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"bounce\",\n- \"random\": false,\n- \"speed\": 1,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": false,\n- \"area\": 2000\n- },\n- \"limit\": 0,\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 0.05,\n- \"speed\": 2,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.4\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": true,\n- \"value\": 1\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": true,\n- \"lineColor\": \"rgba(255,255,255,0.2)\",\n- \"lineWidth\": 0.5\n- },\n- \"enable\": true,\n- \"move\": {\n- \"radius\": 10\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"inline\": {\n- \"arrangement\": \"equidistant\"\n- },\n- \"scale\": 2,\n- \"type\": \"inline\",\n- \"data\": {\n- \"path\": \"M150 0 L75 200 L225 200 Z\",\n- \"size\": {\n- \"width\": 400,\n- \"height\": 210\n- }\n- }\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "planes.json": "@@ -1,112 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40,\n- \"speed\": 3\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"out_mode\": \"out\",\n- \"random\": false,\n- \"speed\": 6,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- },\n- \"value\": 80\n- },\n- \"rotate\": {\n- \"value\": 45,\n- \"path\": true\n- },\n- \"opacity\": {\n- \"anim\": {\n- \"enable\": false,\n- \"opacity_min\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 1\n- },\n- \"shape\": {\n- \"image\": {\n- \"height\": 128,\n- \"src\": \"images/plane_alt.png\",\n- \"width\": 128\n- },\n- \"type\": \"image\"\n- },\n- \"size\": {\n- \"anim\": {\n- \"enable\": false,\n- \"size_min\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 16\n- },\n- \"value\": 32\n- }\n- },\n- \"retina_detect\": true\n-}\n", 
"plasma.json": "@@ -1,118 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"background\": {\n- \"color\": {\n- \"value\": \"#000000\"\n- }\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 150,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\",\n- \"stroke\": {\n- \"width\": 0,\n- \"color\": \"#000000\"\n- },\n- \"polygon\": {\n- \"nb_sides\": 5\n- },\n- \"image\": {\n- \"src\": \"img/github.svg\",\n- \"width\": 100,\n- \"height\": 100\n- }\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 0,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 110,\n- \"color\": \"#19f\",\n- \"opacity\": 0.4,\n- \"width\": 2\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 50,\n- \"direction\": \"none\",\n- \"random\": true,\n- \"straight\": false,\n- \"out_mode\": \"bounce\",\n- \"bounce\": false,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 200,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 200,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 8\n- },\n- \"repulse\": {\n- \"distance\": 150,\n- \"duration\": 0.4\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true\n-}\n", "polygonMask.json": "@@ -1,156 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": false,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 2,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 40,\n- \"duration\": 2,\n- \"opacity\": 8,\n- \"size\": 6\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- },\n- \"slow\": {\n- \"active\": false,\n- \"radius\": 0,\n- \"factor\": 1\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#ffffff\",\n- \"consent\": false,\n- \"distance\": 30,\n- \"enable\": true,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"bounce\",\n- \"random\": false,\n- 
\"speed\": 1,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": false,\n- \"area\": 2000\n- },\n- \"limit\": 0,\n- \"value\": 200\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": true,\n- \"minimumValue\": 0.05,\n- \"speed\": 2,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.4\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": true,\n- \"value\": 1\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": true,\n- \"lineColor\": \"rgba(255,255,255,0.2)\",\n- \"lineWidth\": 0.5\n- },\n- \"enable\": true,\n- \"move\": {\n- \"radius\": 10\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"inline\": {\n- \"arrangement\": \"equidistant\"\n- },\n- \"scale\": 0.5,\n- \"type\": \"inline\",\n- \"url\": \"https://particles.js.org/images/smalldeer.svg\"\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "polygons.json": "@@ -1,175 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"polygon\",\n- \"stroke\": {\n- \"width\": 0,\n- \"color\": \"#000000\"\n- },\n- \"polygon\": [\n- {\n- \"nb_sides\": 3,\n- \"particles\": {\n- \"opacity\": {\n- \"value\": 0.8,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 0.5\n- }\n- },\n- \"size\": {\n- \"value\": 12,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- }\n- },\n- \"color\": {\n- \"value\": \"ff0\"\n- }\n- }\n- },\n- {\n- \"nb_sides\": 5,\n- \"particles\": {\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": 8,\n- \"random\": {\n- \"enable\": false\n- }\n- },\n- \"color\": {\n- \"value\": \"0f0\"\n- }\n- }\n- },\n- {\n- \"nb_sides\": 8,\n- \"particles\": {\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false\n- },\n- \"size\": {\n- \"value\": 20,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 15\n- }\n- },\n- \"color\": {\n- \"value\": \"f00\"\n- }\n- }\n- }\n- ]\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 
2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "random.json": "@@ -1,146 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"connect\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"random\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#ffffff\",\n- \"consent\": false,\n- \"distance\": 150,\n- \"enable\": false,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"random\": false,\n- \"speed\": 6,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"limit\": 500,\n- \"value\": 300\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.5\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- },\n- \"value\": 15\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- \"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "repulse.json": "@@ -1,157 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"groups\": {\n- \"green\": {\n- \"number\": {\n- \"value\": 30\n- },\n- \"color\": {\n- \"value\": \"#00ff00\"\n- },\n- \"repulse\": {\n- \"enabled\": true,\n- \"distance\": 50,\n- \"factor\": 20\n- }\n- },\n- \"yellow\": {\n- \"number\": {\n- \"value\": 30\n- },\n- \"color\": {\n- \"value\": \"#ffff00\"\n- },\n- \"repulse\": {\n- \"enabled\": false,\n- \"distance\": 0\n- }\n- },\n- \"blue\": {\n- \"number\": {\n- \"value\": 30\n- },\n- \"color\": {\n- \"value\": \"#0000ff\"\n- },\n- \"repulse\": {\n- \"enabled\": true,\n- \"distance\": 50\n- }\n- 
},\n- \"cyan\": {\n- \"number\": {\n- \"value\": 30\n- },\n- \"color\": {\n- \"value\": \"#00ffff\"\n- },\n- \"repulse\": {\n- \"enabled\": false,\n- \"distance\": 0\n- }\n- }\n- },\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 14,\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 7\n- },\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 3,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- },\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"links\": {\n- \"opacity\": 1\n- }\n- },\n- \"repulse\": {\n- \"distance\": 100\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "repulseBack.json": "@@ -1,62 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 3\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"repulse\": {\n- \"distance\": 200,\n- \"factor\": 1,\n- \"speed\": 5,\n- \"easing\": \"ease-out-back\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "repulseCirc.json": "@@ -1,62 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 3\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": 
true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"repulse\": {\n- \"distance\": 200,\n- \"factor\": 1,\n- \"speed\": 5,\n- \"easing\": \"ease-out-circ\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "repulseCubic.json": "@@ -1,62 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 3\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"repulse\": {\n- \"distance\": 200,\n- \"factor\": 1,\n- \"speed\": 5,\n- \"easing\": \"ease-out-cubic\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "repulseExpo.json": "@@ -1,62 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 3\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"repulse\": {\n- \"distance\": 200,\n- \"factor\": 1,\n- \"speed\": 5,\n- \"easing\": \"ease-out-expo\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "repulseQuart.json": "@@ -1,62 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 3\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"repulse\": {\n- \"distance\": 200,\n- \"factor\": 1,\n- \"speed\": 5,\n- \"easing\": \"ease-out-quart\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "repulseQuint.json": "@@ -1,62 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": true,\n- 
\"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 3\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"repulse\": {\n- \"distance\": 200,\n- \"factor\": 1,\n- \"speed\": 5,\n- \"easing\": \"ease-out-quint\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "repulseSine.json": "@@ -1,62 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 3\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"repulse\": {\n- \"distance\": 200,\n- \"factor\": 1,\n- \"speed\": 5,\n- \"easing\": \"ease-out-sine\"\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "responsive.json": "@@ -1,130 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": false,\n- \"area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 3,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"responsive\": [\n- {\n- \"maxWidth\": 600,\n- \"options\": {\n- \"particles\": {\n- \"color\": {\n- \"value\": 
\"#0000ff\"\n- },\n- \"number\": {\n- \"value\": 40\n- }\n- }\n- }\n- },\n- {\n- \"maxWidth\": 1000,\n- \"options\": {\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#00ff00\"\n- },\n- \"number\": {\n- \"value\": 60\n- }\n- }\n- }\n- }\n- ],\n- \"background\": {\n- \"color\": \"#000000\"\n- }\n-}\n", "ring.json": "@@ -1,69 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 0,\n- \"limit\": 1000\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"move\": {\n- \"enable\": true,\n- \"outModes\": {\n- \"default\": \"destroy\"\n- },\n- \"speed\": 1,\n- \"path\": {\n- \"enable\": true,\n- \"delay\": {\n- \"value\": 0.75\n- }\n- },\n- \"trail\": {\n- \"enable\": true,\n- \"fillColor\": \"#031927\",\n- \"length\": 1000\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.05\n- },\n- \"size\": {\n- \"value\": 1\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#031927\"\n- },\n- \"emitters\": {\n- \"fill\": false,\n- \"shape\": \"circle\",\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"size\": {\n- \"width\": 250,\n- \"height\": 250,\n- \"mode\": \"precise\"\n- },\n- \"life\": {\n- \"delay\": 10,\n- \"wait\": true\n- },\n- \"rate\": {\n- \"delay\": 0.1,\n- \"quantity\": 10\n- },\n- \"startCount\": 1000\n- }\n-}\n", "seaAnemone.json": "@@ -1,93 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#FF0000\"\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": true,\n- \"rotate\": {\n- \"distance\": 100,\n- \"x\": 2000,\n- \"y\": 2000\n- }\n- },\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outModes\": {\n- \"default\": \"destroy\"\n- },\n- \"path\": {\n- \"clamp\": false,\n- \"enable\": true,\n- \"delay\": {\n- \"value\": 0\n- },\n- \"generator\": \"curvesPathGenerator\"\n- },\n- \"random\": false,\n- \"speed\": 1,\n- \"straight\": false,\n- \"trail\": {\n- \"fillColor\": \"#000\",\n- \"length\": 30,\n- \"enable\": true\n- }\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"value\": 0\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"value\": 10,\n- \"animation\": {\n- \"count\": 1,\n- \"startValue\": \"min\",\n- \"enable\": true,\n- \"minimumValue\": 1,\n- \"speed\": 10,\n- \"sync\": true\n- }\n- }\n- },\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"detectRetina\": true,\n- \"emitters\": {\n- \"direction\": \"none\",\n- \"rate\": {\n- \"quantity\": 5,\n- \"delay\": 0.3\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"spawnColor\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 10\n- }\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- }\n- }\n-}\n", "shadow.json": "@@ -1,125 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shadow\": {\n- \"enable\": true,\n- \"color\": \"#000000\",\n- \"blur\": 5,\n- \"offset\": {\n- \"x\": 3,\n- \"y\": 3\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": 
false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1,\n- \"shadow\": {\n- \"enable\": true,\n- \"blur\": 5,\n- \"color\": \"#000000\"\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"fpsLimit\": 120,\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "slow.json": "@@ -1,115 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"slow\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- },\n- \"slow\": {\n- \"radius\": 100,\n- \"factor\": 3\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "snow.json": "@@ -1,107 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- 
\"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 400,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#fff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": false,\n- \"distance\": 500,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 2\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"bottom\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"size\": true,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 0.5\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 4,\n- \"duration\": 0.3,\n- \"opacity\": 1\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "speedDecay.json": "@@ -1,172 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 0\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 15,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 10\n- },\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"enable\": false\n- },\n- \"life\": {\n- \"duration\": {\n- \"sync\": true,\n- \"value\": 5\n- },\n- \"count\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"gravity\": {\n- \"enable\": true\n- },\n- \"speed\": 10,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"bounce\",\n- \"bottom\": \"bounce\",\n- \"left\": \"destroy\",\n- \"right\": \"destroy\",\n- \"top\": \"none\"\n- },\n- \"trail\": {\n- \"enable\": true,\n- \"fillColor\": \"#000000\",\n- \"length\": 10\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- 
\"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"emitters\": {\n- \"direction\": \"top\",\n- \"life\": {\n- \"count\": 0,\n- \"duration\": 5,\n- \"delay\": 2\n- },\n- \"rate\": {\n- \"delay\": 0.1,\n- \"quantity\": 1\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"particles\": {\n- \"bounce\": {\n- \"vertical\": {\n- \"value\": 0.8,\n- \"random\": {\n- \"enable\": true,\n- \"minimValue\": 0.4\n- }\n- }\n- },\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- },\n- \"links\": {\n- \"enable\": false\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": {\n- \"enable\": true,\n- \"minimumValue\": 5\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.5\n- },\n- \"move\": {\n- \"speed\": 30,\n- \"random\": false,\n- \"decay\": 0.1\n- }\n- }\n- }\n-}\n", "spin.json": "@@ -1,115 +0,0 @@\n-{\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 10,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": {\n- \"min\": 1,\n- \"max\": 5\n- },\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outMode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- },\n- \"spin\": {\n- \"acceleration\": {\n- \"min\": -1,\n- \"max\": 1\n- },\n- \"enable\": true\n- },\n- \"trail\": {\n- \"enable\": true,\n- \"fillColor\": \"#000\",\n- \"length\": 30\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "star.json": "@@ -1,123 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 10,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#fff\"\n- },\n- \"shape\": {\n- \"type\": \"star\",\n- \"options\": {\n- \"star\": {\n- \"sides\": 5\n- }\n- }\n- },\n- \"opacity\": {\n- \"value\": 0.8,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 1,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 4,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 40,\n- \"size_min\": 0.1,\n- \"sync\": 
false\n- }\n- },\n- \"rotate\": {\n- \"value\": 0,\n- \"random\": true,\n- \"direction\": \"clockwise\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 600,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 2\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"grab\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"bubble\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1,\n- \"color\": \"#f00\"\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 8,\n- \"color\": \"#ffff00\"\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#111\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "starry.json": "@@ -1,23 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true\n- },\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"particles\": {\n- \"opacity\": {\n- \"value\": {\n- \"min\": 0.1,\n- \"max\": 1\n- },\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 0.25\n- }\n- },\n- \"size\": {\n- \"value\": 1\n- }\n- }\n-}\n", "strokeAnimation.json": "@@ -1,122 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 60,\n- \"sync\": true\n- }\n- },\n- \"stroke\": {\n- \"width\": 3,\n- \"color\": {\n- \"value\": \"#0000ff\",\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 60,\n- \"sync\": true\n- }\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": 
{\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "svgReplace.json": "@@ -1,175 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"elementId\": \"repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"lineLinked\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ff0000\"\n- },\n- \"lineLinked\": {\n- \"blink\": false,\n- \"color\": \"#000\",\n- \"consent\": false,\n- \"distance\": 150,\n- \"enable\": false,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"outMode\": \"out\",\n- \"random\": false,\n- \"speed\": 2,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800\n- },\n- \"limit\": 0,\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.5\n- },\n- \"rotate\": {\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 5,\n- \"sync\": false\n- },\n- \"direction\": \"random\",\n- \"random\": true,\n- \"value\": 0\n- },\n- \"shape\": {\n- \"character\": {\n- \"fill\": false,\n- \"font\": \"Verdana\",\n- \"style\": \"\",\n- \"value\": \"*\",\n- \"weight\": \"400\"\n- },\n- \"image\": [\n- {\n- \"src\": \"/images/canine.svg\",\n- \"width\": 32,\n- \"height\": 32,\n- \"replaceColor\": true\n- }\n- ],\n- \"polygon\": {\n- \"sides\": 5\n- },\n- \"stroke\": {\n- \"color\": \"#000000\",\n- \"width\": 0\n- },\n- \"type\": \"image\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 16\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- \"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"background\": {\n- \"color\": \"#fff\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "test.json": "@@ -1,546 +0,0 @@\n-{\n- \"autoPlay\": true,\n- \"background\": {\n- \"color\": {\n- \"value\": \"#0d47a1\"\n- },\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\",\n- \"opacity\": 1\n- },\n- \"backgroundMask\": {\n- \"composite\": \"destination-out\",\n- \"cover\": {\n- 
\"color\": {\n- \"value\": \"#fff\"\n- },\n- \"opacity\": 1\n- },\n- \"enable\": false\n- },\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 1\n- },\n- \"detectRetina\": true,\n- \"fpsLimit\": 120,\n- \"infection\": {\n- \"cure\": false,\n- \"delay\": 0,\n- \"enable\": false,\n- \"infections\": 0,\n- \"stages\": []\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onDiv\": {\n- \"selectors\": \"#repulse-div\",\n- \"enable\": false,\n- \"mode\": \"repulse\",\n- \"type\": \"circle\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"attract\": {\n- \"distance\": 200,\n- \"duration\": 0.4,\n- \"easing\": \"ease-out-quad\",\n- \"factor\": 1,\n- \"maxSpeed\": 50,\n- \"speed\": 1\n- },\n- \"bounce\": {\n- \"distance\": 200\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"connect\": {\n- \"distance\": 80,\n- \"links\": {\n- \"opacity\": 0.5\n- },\n- \"radius\": 60\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"links\": {\n- \"blink\": false,\n- \"consent\": false,\n- \"opacity\": 1\n- }\n- },\n- \"light\": {\n- \"area\": {\n- \"gradient\": {\n- \"start\": {\n- \"value\": \"#ffffff\"\n- },\n- \"stop\": {\n- \"value\": \"#000000\"\n- }\n- },\n- \"radius\": 1000\n- },\n- \"shadow\": {\n- \"color\": {\n- \"value\": \"#000000\"\n- },\n- \"length\": 2000\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4,\n- \"factor\": 100,\n- \"speed\": 1,\n- \"maxSpeed\": 50,\n- \"easing\": \"ease-out-quad\"\n- },\n- \"slow\": {\n- \"factor\": 1,\n- \"radius\": 0\n- },\n- \"trail\": {\n- \"delay\": 1,\n- \"pauseOnStop\": false,\n- \"quantity\": 1\n- }\n- }\n- },\n- \"manualParticles\": [],\n- \"motion\": {\n- \"disable\": false,\n- \"reduce\": {\n- \"factor\": 4,\n- \"value\": true\n- }\n- },\n- \"particles\": {\n- \"bounce\": {\n- \"horizontal\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1\n- },\n- \"value\": 1\n- },\n- \"vertical\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1\n- },\n- \"value\": 1\n- }\n- },\n- \"collisions\": {\n- \"bounce\": {\n- \"horizontal\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1\n- },\n- \"value\": 1\n- },\n- \"vertical\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1\n- },\n- \"value\": 1\n- }\n- },\n- \"enable\": false,\n- \"mode\": \"bounce\",\n- \"overlap\": {\n- \"enable\": true,\n- \"retries\": 0\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\",\n- \"animation\": {\n- \"h\": {\n- \"count\": 0,\n- \"enable\": false,\n- \"offset\": 0,\n- \"speed\": 1,\n- \"sync\": true\n- },\n- \"s\": {\n- \"count\": 0,\n- \"enable\": false,\n- \"offset\": 0,\n- \"speed\": 1,\n- \"sync\": true\n- },\n- \"l\": {\n- \"count\": 0,\n- \"enable\": false,\n- \"offset\": 0,\n- \"speed\": 1,\n- \"sync\": true\n- }\n- }\n- },\n- \"destroy\": {\n- \"mode\": \"none\",\n- \"split\": {\n- \"count\": 1,\n- \"factor\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0\n- },\n- \"value\": 3\n- },\n- \"rate\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0\n- },\n- \"value\": {\n- \"min\": 4,\n- \"max\": 9\n- }\n- },\n- \"sizeOffset\": true\n- }\n- },\n- \"life\": {\n- \"count\": 0,\n- \"delay\": {\n- 
\"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0\n- },\n- \"value\": 0,\n- \"sync\": false\n- },\n- \"duration\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 1\n- },\n- \"value\": 0,\n- \"sync\": false\n- }\n- },\n- \"links\": {\n- \"blink\": false,\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"consent\": false,\n- \"distance\": 150,\n- \"enable\": true,\n- \"frequency\": 1,\n- \"opacity\": 0.4,\n- \"shadow\": {\n- \"blur\": 5,\n- \"color\": {\n- \"value\": \"lime\"\n- },\n- \"enable\": false\n- },\n- \"triangles\": {\n- \"enable\": false,\n- \"frequency\": 1\n- },\n- \"width\": 1,\n- \"warp\": false\n- },\n- \"move\": {\n- \"angle\": {\n- \"offset\": 0,\n- \"value\": 90\n- },\n- \"attract\": {\n- \"distance\": 200,\n- \"enable\": false,\n- \"rotate\": {\n- \"x\": 600,\n- \"y\": 1200\n- }\n- },\n- \"decay\": 0,\n- \"direction\": \"none\",\n- \"drift\": 0,\n- \"enable\": false,\n- \"gravity\": {\n- \"acceleration\": 9.81,\n- \"enable\": false,\n- \"inverse\": false,\n- \"maxSpeed\": 50\n- },\n- \"path\": {\n- \"clamp\": true,\n- \"delay\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0\n- },\n- \"value\": 0\n- },\n- \"enable\": false\n- },\n- \"outModes\": {\n- \"default\": \"out\",\n- \"bottom\": \"out\",\n- \"left\": \"out\",\n- \"right\": \"out\",\n- \"top\": \"out\"\n- },\n- \"random\": false,\n- \"size\": false,\n- \"speed\": 1,\n- \"straight\": true,\n- \"trail\": {\n- \"enable\": false,\n- \"length\": 10,\n- \"fillColor\": {\n- \"value\": \"#000000\"\n- }\n- },\n- \"vibrate\": false,\n- \"warp\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"area\": 800,\n- \"factor\": 1000\n- },\n- \"limit\": 0,\n- \"value\": 80\n- },\n- \"opacity\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1\n- },\n- \"value\": {\n- \"min\": 0,\n- \"max\": 1\n- },\n- \"animation\": {\n- \"count\": 1,\n- \"enable\": true,\n- \"speed\": 1,\n- \"sync\": true,\n- \"destroy\": \"none\",\n- \"minimumValue\": 0,\n- \"startValue\": \"min\"\n- }\n- },\n- \"reduceDuplicates\": false,\n- \"roll\": {\n- \"darken\": {\n- \"enable\": false,\n- \"value\": 0\n- },\n- \"enable\": false,\n- \"enlighten\": {\n- \"enable\": false,\n- \"value\": 0\n- },\n- \"speed\": 25\n- },\n- \"rotate\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0\n- },\n- \"value\": 0,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 0,\n- \"sync\": false\n- },\n- \"direction\": \"clockwise\",\n- \"path\": false\n- },\n- \"shadow\": {\n- \"blur\": 0,\n- \"color\": {\n- \"value\": \"#000000\"\n- },\n- \"enable\": false,\n- \"offset\": {\n- \"x\": 0,\n- \"y\": 0\n- }\n- },\n- \"shape\": {\n- \"options\": {\n- \"character\": [\n- {\n- \"fill\": true,\n- \"font\": \"Font Awesome 5 Brands\",\n- \"style\": \"\",\n- \"value\": [\n- \"\uf179\"\n- ],\n- \"weight\": \"400\"\n- },\n- {\n- \"fill\": true,\n- \"font\": \"Font Awesome 5 Free\",\n- \"style\": \"\",\n- \"value\": [\n- \"\uf5d1\"\n- ],\n- \"weight\": \"900\"\n- }\n- ],\n- \"char\": [\n- {\n- \"fill\": true,\n- \"font\": \"Font Awesome 5 Brands\",\n- \"style\": \"\",\n- \"value\": [\n- \"\uf179\"\n- ],\n- \"weight\": \"400\"\n- },\n- {\n- \"fill\": true,\n- \"font\": \"Font Awesome 5 Free\",\n- \"style\": \"\",\n- \"value\": [\n- \"\uf5d1\"\n- ],\n- \"weight\": \"900\"\n- }\n- ],\n- \"polygon\": {\n- \"sides\": 5\n- },\n- \"star\": {\n- \"sides\": 5\n- },\n- \"image\": {\n- \"height\": 100,\n- \"replaceColor\": true,\n- \"src\": \"https://particles.js.org/images/github.svg\",\n- \"width\": 
100\n- },\n- \"images\": {\n- \"height\": 100,\n- \"replaceColor\": true,\n- \"src\": \"https://particles.js.org/images/github.svg\",\n- \"width\": 100\n- }\n- },\n- \"type\": \"circle\"\n- },\n- \"size\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0\n- },\n- \"value\": {\n- \"min\": 1,\n- \"max\": 25\n- },\n- \"animation\": {\n- \"count\": 1,\n- \"enable\": true,\n- \"speed\": 5,\n- \"sync\": true,\n- \"destroy\": \"none\",\n- \"minimumValue\": 1,\n- \"startValue\": \"min\"\n- }\n- },\n- \"stroke\": {\n- \"width\": 1,\n- \"color\": {\n- \"value\": \"#ffffff\",\n- \"animation\": {\n- \"h\": {\n- \"count\": 0,\n- \"enable\": false,\n- \"offset\": 0,\n- \"speed\": 1,\n- \"sync\": true\n- },\n- \"s\": {\n- \"count\": 0,\n- \"enable\": false,\n- \"offset\": 0,\n- \"speed\": 1,\n- \"sync\": true\n- },\n- \"l\": {\n- \"count\": 0,\n- \"enable\": false,\n- \"offset\": 0,\n- \"speed\": 1,\n- \"sync\": true\n- }\n- }\n- }\n- },\n- \"tilt\": {\n- \"random\": {\n- \"enable\": false,\n- \"minimumValue\": 0\n- },\n- \"value\": 0,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 0,\n- \"sync\": false\n- },\n- \"direction\": \"clockwise\",\n- \"enable\": false\n- },\n- \"twinkle\": {\n- \"lines\": {\n- \"enable\": false,\n- \"frequency\": 0.05,\n- \"opacity\": 1\n- },\n- \"particles\": {\n- \"enable\": false,\n- \"frequency\": 0.05,\n- \"opacity\": 1\n- }\n- },\n- \"wobble\": {\n- \"distance\": 5,\n- \"enable\": false,\n- \"speed\": 50\n- }\n- },\n- \"pauseOnBlur\": true,\n- \"pauseOnOutsideViewport\": true,\n- \"responsive\": [],\n- \"themes\": []\n-}\n", "trail.json": "@@ -1,122 +0,0 @@\n-{\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"fpsLimit\": 120,\n- \"emitters\": {\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"size\": {\n- \"width\": 50,\n- \"height\": 50,\n- \"mode\": \"precise\"\n- },\n- \"rate\": {\n- \"delay\": 1,\n- \"quantity\": 10\n- }\n- },\n- \"particles\": {\n- \"number\": {\n- \"value\": 0,\n- \"limit\": 300\n- },\n- \"color\": {\n- \"value\": [\n- \"#5bc0eb\",\n- \"#fde74c\",\n- \"#9bc53d\",\n- \"#e55934\",\n- \"#fa7921\"\n- ]\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1\n- },\n- \"size\": {\n- \"value\": 1\n- },\n- \"links\": {\n- \"enable\": false,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 1,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"destroy\"\n- },\n- \"bounce\": false,\n- \"path\": {\n- \"enable\": true,\n- \"delay\": {\n- \"value\": 0.1\n- },\n- \"options\": {\n- \"size\": 5,\n- \"draw\": false,\n- \"increment\": 0.001\n- },\n- \"generator\": \"simplexNoise\"\n- },\n- \"trail\": {\n- \"enable\": true,\n- \"fillColor\": \"#000000\",\n- \"length\": 20\n- },\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"grab\"\n- },\n- \"onClick\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 200,\n- \"links\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- }\n- }\n- },\n- \"detectRetina\": true\n-}\n", 
"tunnel.json": "@@ -1,83 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 0\n- },\n- \"collisions\": {\n- \"enable\": false\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": {\n- \"min\": 0.3,\n- \"max\": 0.8\n- }\n- },\n- \"size\": {\n- \"value\": {\n- \"min\": 1,\n- \"max\": 10\n- }\n- },\n- \"move\": {\n- \"enable\": true,\n- \"size\": true,\n- \"speed\": 5,\n- \"direction\": \"none\",\n- \"outModes\": {\n- \"default\": \"destroy\"\n- },\n- \"trail\": {\n- \"enable\": true,\n- \"fillColor\": \"#000000\",\n- \"length\": 3\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000\"\n- },\n- \"emitters\": {\n- \"direction\": \"none\",\n- \"rate\": {\n- \"delay\": 0.25,\n- \"quantity\": 10\n- },\n- \"position\": {\n- \"x\": 50,\n- \"y\": 50\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"spawnColor\": {\n- \"value\": \"#ff0000\",\n- \"animation\": {\n- \"h\": {\n- \"enable\": true,\n- \"speed\": 5\n- },\n- \"l\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"offset\": {\n- \"min\": 20,\n- \"max\": 80\n- }\n- }\n- }\n- }\n- }\n-}\n", "twinkle.json": "@@ -1,120 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ff0000\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 2,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- },\n- \"twinkle\": {\n- \"particles\": {\n- \"enable\": true,\n- \"color\": \"#ffff00\",\n- \"frequency\": 0.05,\n- \"opacity\": 1\n- },\n- \"lines\": {\n- \"enable\": true,\n- \"color\": \"#ff0000\",\n- \"frequency\": 0.005,\n- \"opacity\": 1\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "vibrate.json": "@@ -1,107 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": 
true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 5,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": true,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 0,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- },\n- \"vibrate\": true\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": true,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "virus.json": "@@ -1,146 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"interactivity\": {\n- \"events\": {\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"onHover\": {\n- \"enable\": true,\n- \"mode\": \"bubble\",\n- \"parallax\": {\n- \"enable\": false,\n- \"force\": 60,\n- \"smooth\": 10\n- }\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"bubble\": {\n- \"distance\": 400,\n- \"duration\": 2,\n- \"opacity\": 0.8,\n- \"size\": 40\n- },\n- \"grab\": {\n- \"distance\": 400,\n- \"lineLinked\": {\n- \"opacity\": 1\n- }\n- },\n- \"push\": {\n- \"quantity\": 4\n- },\n- \"remove\": {\n- \"quantity\": 2\n- },\n- \"repulse\": {\n- \"distance\": 200,\n- \"duration\": 0.4\n- }\n- }\n- },\n- \"particles\": {\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"lineLinked\": {\n- \"color\": \"#323031\",\n- \"distance\": 150,\n- \"enable\": false,\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- },\n- \"bounce\": false,\n- \"direction\": \"none\",\n- \"enable\": true,\n- \"out_mode\": \"bounce\",\n- \"random\": false,\n- \"speed\": 6,\n- \"straight\": false\n- },\n- \"number\": {\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- },\n- \"value\": 170\n- },\n- \"opacity\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 1,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 0.5\n- },\n- \"shape\": {\n- \"character\": {\n- \"fill\": false,\n- \"font\": \"Verdana\",\n- \"style\": \"\",\n- \"value\": \"*\",\n- \"weight\": \"400\"\n- },\n- \"image\": {\n- \"height\": 32,\n- \"replace_color\": true,\n- \"src\": \"https://particles.js.org/images/sars-cov-2.png\",\n- \"width\": 32\n- },\n- \"polygon\": {\n- \"nb_sides\": 5\n- },\n- \"stroke\": {\n- \"color\": \"#000000\",\n- \"width\": 0\n- 
},\n- \"type\": \"image\"\n- },\n- \"size\": {\n- \"animation\": {\n- \"enable\": false,\n- \"minimumValue\": 0.1,\n- \"speed\": 40,\n- \"sync\": false\n- },\n- \"random\": false,\n- \"value\": 16\n- }\n- },\n- \"polygon\": {\n- \"draw\": {\n- \"enable\": false,\n- \"lineColor\": \"#ffffff\",\n- \"lineWidth\": 0.5\n- },\n- \"move\": {\n- \"radius\": 10\n- },\n- \"scale\": 1,\n- \"type\": \"none\",\n- \"url\": \"\"\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#323031\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "warp.json": "@@ -1,108 +0,0 @@\n-{\n- \"fullScreen\": {\n- \"enable\": true,\n- \"zIndex\": 0\n- },\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"number\": {\n- \"value\": 80,\n- \"density\": {\n- \"enable\": true,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#ffffff\"\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 0.5,\n- \"random\": false,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"opacity_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3,\n- \"random\": true,\n- \"anim\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"size_min\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"line_linked\": {\n- \"enable\": true,\n- \"distance\": 150,\n- \"color\": \"#ffffff\",\n- \"warp\": true,\n- \"opacity\": 1,\n- \"width\": 1\n- },\n- \"move\": {\n- \"enable\": true,\n- \"speed\": 6,\n- \"direction\": \"none\",\n- \"random\": false,\n- \"straight\": false,\n- \"out_mode\": \"out\",\n- \"warp\": true,\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onhover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onclick\": {\n- \"enable\": false,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"line_linked\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"particles_nb\": 4\n- },\n- \"remove\": {\n- \"particles_nb\": 2\n- }\n- }\n- },\n- \"retina_detect\": true,\n- \"background\": {\n- \"color\": \"#0d47a1\",\n- \"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "wobble.json": "@@ -1,162 +0,0 @@\n-{\n- \"background\": {\n- \"color\": {\n- \"value\": \"#000000\"\n- }\n- },\n- \"fullScreen\": {\n- \"enable\": false,\n- \"zIndex\": -1\n- },\n- \"particles\": {\n- \"bounce\": {\n- \"vertical\": {\n- \"value\": 0\n- },\n- \"horizontal\": {\n- \"value\": 0\n- }\n- },\n- \"color\": {\n- \"value\": [\n- \"#1E00FF\",\n- \"#FF0061\",\n- \"#E1FF00\",\n- \"#00FF9E\"\n- ],\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 30\n- }\n- },\n- \"move\": {\n- \"decay\": 0.1,\n- \"direction\": \"top\",\n- \"enable\": true,\n- \"gravity\": {\n- \"acceleration\": 9.81,\n- \"enable\": true,\n- \"maxSpeed\": 200\n- },\n- \"outModes\": {\n- \"top\": \"none\",\n- \"default\": \"destroy\",\n- \"bottom\": \"bounce\"\n- },\n- \"speed\": {\n- \"min\": 50,\n- \"max\": 150\n- }\n- },\n- \"number\": {\n- \"value\": 0,\n- \"limit\": 300\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"animation\": {\n- \"enable\": false,\n- \"startValue\": \"max\",\n- \"destroy\": \"min\",\n- \"speed\": 0.3,\n- \"sync\": true\n- }\n- },\n- \"rotate\": {\n- \"value\": {\n- \"min\": 
0,\n- \"max\": 360\n- },\n- \"direction\": \"random\",\n- \"move\": true,\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 60\n- }\n- },\n- \"tilt\": {\n- \"direction\": \"random\",\n- \"enable\": true,\n- \"move\": true,\n- \"value\": {\n- \"min\": 0,\n- \"max\": 360\n- },\n- \"animation\": {\n- \"enable\": true,\n- \"speed\": 60\n- }\n- },\n- \"shape\": {\n- \"type\": [\n- \"circle\",\n- \"square\",\n- \"polygon\",\n- \"character\",\n- \"character\",\n- \"character\"\n- ],\n- \"options\": {\n- \"polygon\": [\n- {\n- \"sides\": 5\n- },\n- {\n- \"sides\": 6\n- }\n- ],\n- \"character\": [\n- {\n- \"value\": [\n- \"\ud83d\udca9\",\n- \"\ud83e\udd21\",\n- \"\ud83c\udf40\",\n- \"\ud83c\udf59\"\n- ]\n- }\n- ]\n- }\n- },\n- \"size\": {\n- \"value\": 3\n- },\n- \"roll\": {\n- \"darken\": {\n- \"enable\": true,\n- \"value\": 30\n- },\n- \"enlighten\": {\n- \"enable\": true,\n- \"value\": 30\n- },\n- \"enable\": true,\n- \"speed\": {\n- \"min\": 15,\n- \"max\": 25\n- }\n- },\n- \"wobble\": {\n- \"distance\": 30,\n- \"enable\": true,\n- \"move\": true,\n- \"speed\": {\n- \"min\": -15,\n- \"max\": 15\n- }\n- }\n- },\n- \"emitters\": {\n- \"position\": {\n- \"x\": 50,\n- \"y\": 100\n- },\n- \"size\": {\n- \"width\": 0,\n- \"height\": 0\n- },\n- \"rate\": {\n- \"quantity\": 10,\n- \"delay\": 0.1\n- }\n- }\n-}\n", "zindex.json": "@@ -1,150 +0,0 @@\n-{\n- \"fpsLimit\": 120,\n- \"particles\": {\n- \"groups\": {\n- \"z5000\": {\n- \"number\": {\n- \"value\": 70\n- },\n- \"zIndex\": {\n- \"value\": 50\n- }\n- },\n- \"z7500\": {\n- \"number\": {\n- \"value\": 30\n- },\n- \"zIndex\": {\n- \"value\": 75\n- }\n- },\n- \"z2500\": {\n- \"number\": {\n- \"value\": 50\n- },\n- \"zIndex\": {\n- \"value\": 25\n- }\n- },\n- \"z1000\": {\n- \"number\": {\n- \"value\": 40\n- },\n- \"zIndex\": {\n- \"value\": 10\n- }\n- }\n- },\n- \"number\": {\n- \"value\": 200,\n- \"density\": {\n- \"enable\": false,\n- \"value_area\": 800\n- }\n- },\n- \"color\": {\n- \"value\": \"#fff\",\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 20,\n- \"sync\": true\n- }\n- },\n- \"shape\": {\n- \"type\": \"circle\"\n- },\n- \"opacity\": {\n- \"value\": 1,\n- \"random\": false,\n- \"animation\": {\n- \"enable\": false,\n- \"speed\": 3,\n- \"minimumValue\": 0.1,\n- \"sync\": false\n- }\n- },\n- \"size\": {\n- \"value\": 3\n- },\n- \"links\": {\n- \"enable\": false,\n- \"distance\": 100,\n- \"color\": \"#ffffff\",\n- \"opacity\": 0.4,\n- \"width\": 1\n- },\n- \"move\": {\n- \"angle\": {\n- \"value\": 10,\n- \"offset\": 0\n- },\n- \"enable\": true,\n- \"speed\": 5,\n- \"direction\": \"right\",\n- \"random\": false,\n- \"straight\": false,\n- \"outModes\": {\n- \"default\": \"out\"\n- },\n- \"attract\": {\n- \"enable\": false,\n- \"rotateX\": 600,\n- \"rotateY\": 1200\n- }\n- },\n- \"zIndex\": {\n- \"value\": 5,\n- \"opacityRate\": 0.5\n- }\n- },\n- \"interactivity\": {\n- \"events\": {\n- \"onHover\": {\n- \"enable\": false,\n- \"mode\": \"repulse\"\n- },\n- \"onClick\": {\n- \"enable\": true,\n- \"mode\": \"push\"\n- },\n- \"resize\": true\n- },\n- \"modes\": {\n- \"grab\": {\n- \"distance\": 400,\n- \"links\": {\n- \"opacity\": 1\n- }\n- },\n- \"bubble\": {\n- \"distance\": 400,\n- \"size\": 40,\n- \"duration\": 2,\n- \"opacity\": 0.8\n- },\n- \"repulse\": {\n- \"distance\": 200\n- },\n- \"push\": {\n- \"quantity\": 4,\n- \"groups\": [\n- \"z5000\",\n- \"z7500\",\n- \"z2500\",\n- \"z1000\"\n- ]\n- },\n- \"remove\": {\n- \"quantity\": 2\n- }\n- }\n- },\n- \"detectRetina\": true,\n- \"background\": {\n- \"color\": \"#000000\",\n- 
\"image\": \"\",\n- \"position\": \"50% 50%\",\n- \"repeat\": \"no-repeat\",\n- \"size\": \"cover\"\n- }\n-}\n", "index.html": "@@ -54,7 +54,7 @@\n <option value=\"customPreset\">Custom Preset</option>\n <option value=\"customShape\">Custom Shape</option>\n <option value=\"dataImages\">Data URI Images</option>\n- <option value=\"default\">Default</option>\n+ <option value=\"basic\">Default</option>\n <option value=\"destroy\">Destroy</option>\n <option value=\"disappearing\">Disappearing</option>\n <option value=\"divEvents\">Div Events</option>\n@@ -158,18 +158,20 @@\n \n <script type=\"text/javascript\">\n document.addEventListener(\"DOMContentLoaded\", () => {\n- loadInfectionPlugin(tsParticles);\n- loadLightInteraction(tsParticles);\n- loadParticlesRepulseInteraction(tsParticles);\n- loadGradientUpdater(tsParticles);\n- loadOrbitUpdater(tsParticles);\n- loadCurvesPath(tsParticles);\n- loadPolygonPath(tsParticles);\n- loadPerlinNoisePath(tsParticles);\n- loadSimplexNoisePath(tsParticles);\n-\n- const loadPreset = (preset) => {\n- tsParticles.loadJSON(\"tsparticles\", `/configs/${preset}.json`);\n+ (async () => {\n+ await loadInfectionPlugin(tsParticles);\n+ await loadLightInteraction(tsParticles);\n+ await loadParticlesRepulseInteraction(tsParticles);\n+ await loadGradientUpdater(tsParticles);\n+ await loadOrbitUpdater(tsParticles);\n+ await loadCurvesPath(tsParticles);\n+ await loadPolygonPath(tsParticles);\n+ await loadPerlinNoisePath(tsParticles);\n+ await loadSimplexNoisePath(tsParticles);\n+ })();\n+\n+ const loadPreset = async (preset) => {\n+ await tsParticles.loadJSON(\"tsparticles\", `/configs/${preset}.json`);\n }\n \n loadPreset(sessionStorage.preset || \"index\");\n", "404.js": "@@ -1 +1,66 @@\n-tsParticles.loadJSON(\"tsparticles\", \"/configs/404.json\");\n\\ No newline at end of file\n+tsParticles.load(\"tsparticles\", {\n+ \"fullScreen\": {\n+ \"enable\": true,\n+ \"zIndex\": -1\n+ },\n+ \"particles\": {\n+ \"number\": {\n+ \"value\": 50\n+ },\n+ \"color\": {\n+ \"value\": [\n+ \"#3998D0\",\n+ \"#2EB6AF\",\n+ \"#A9BD33\",\n+ \"#FEC73B\",\n+ \"#F89930\",\n+ \"#F45623\",\n+ \"#D62E32\",\n+ \"#EB586E\",\n+ \"#9952CF\"\n+ ]\n+ },\n+ \"shape\": {\n+ \"type\": \"circle\"\n+ },\n+ \"opacity\": {\n+ \"value\": 0.8,\n+ \"random\": {\n+ \"enable\": true,\n+ \"minimumValue\": 0.4\n+ }\n+ },\n+ \"size\": {\n+ \"value\": 400,\n+ \"random\": {\n+ \"enable\": true,\n+ \"minimumValue\": 200\n+ },\n+ \"animation\": {\n+ \"enable\": true,\n+ \"speed\": 100,\n+ \"minimumValue\": 200,\n+ \"sync\": false\n+ }\n+ },\n+ \"move\": {\n+ \"enable\": true,\n+ \"speed\": 10,\n+ \"direction\": \"top\",\n+ \"random\": false,\n+ \"straight\": false,\n+ \"outMode\": \"out\",\n+ \"attract\": {\n+ \"enable\": false,\n+ \"rotate\": {\n+ \"x\": 600,\n+ \"y\": 1200\n+ }\n+ }\n+ }\n+ },\n+ \"background\": {\n+ \"color\": \"#ffffff\"\n+ }\n+ }\n+);\n", "package.json": "@@ -7,7 +7,8 @@\n \"scripts\": {\n \"clear:cache\": \"rimraf -f ./node_modules/.cache\",\n \"clear:docs\": \"rimraf -f ./docs\",\n- \"build:js\": \"minify ./js/404.js > ./js/404.min.js && pnpm run copy:engine && pnpm run copy:light && pnpm run copy:p-repulse && pnpm run copy:curves && pnpm run copy:polygon && pnpm run copy:perlin && pnpm run copy:simplex && pnpm run copy:infection && pnpm run copy:gradient && pnpm run copy:orbit\",\n+ \"build:js\": \"minify ./js/404.js > ./js/404.min.js && pnpm run copy:engine && pnpm run copy:light && pnpm run copy:p-repulse && pnpm run copy:curves && pnpm run copy:polygon && pnpm run copy:perlin && pnpm run 
copy:simplex && pnpm run copy:infection && pnpm run copy:gradient && pnpm run copy:orbit && pnpm run copy:configs\",\n+ \"copy:configs\": \"copyfiles -u 2 ./node_modules/tsparticles-demo-configs/*.json ./configs\",\n \"copy:engine\": \"copyfiles -u 2 ./node_modules/tsparticles/tsparticles.bundle.min.js ./js\",\n \"copy:light\": \"copyfiles -u 2 ./node_modules/tsparticles-interaction-light/tsparticles.interaction.light.min.js ./js\",\n \"copy:p-repulse\": \"copyfiles -u 2 ./node_modules/tsparticles-interaction-particles-repulse/tsparticles.interaction.particles.repulse.min.js ./js\",\n@@ -40,6 +41,7 @@\n \"rimraf\": \"^3.0.2\",\n \"sass\": \"^1.56.0\",\n \"tsparticles\": \"^2.5.3\",\n+ \"tsparticles-demo-configs\": \"^1.5.3\",\n \"tsparticles-engine\": \"^2.5.2\",\n \"tsparticles-interaction-light\": \"^2.5.3\",\n \"tsparticles-interaction-particles-repulse\": \"^2.5.3\",\n"}
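The tsparticles demo row above deletes the locally checked-in preset JSON files and instead copies them from the `tsparticles-demo-configs` package at build time (the new `copy:configs` script), while the demo page switches to awaiting the now-async plugin loaders and `tsParticles.loadJSON`. A minimal TypeScript sketch of the resulting loading pattern, assuming the bundles listed in package.json expose `tsParticles` and the loaders as globals and that the copied presets are served under /configs/ (this is an illustration, not part of the commit):

    // Sketch only: mirrors the index.html hunk above under the stated assumptions.
    declare const tsParticles: { loadJSON(id: string, url: string): Promise<unknown> };
    declare function loadCurvesPath(engine: typeof tsParticles): Promise<void>;

    document.addEventListener("DOMContentLoaded", () => {
      (async () => {
        // v2 plugin loaders return promises, so they are awaited before first use.
        await loadCurvesPath(tsParticles);

        // Presets are now plain JSON files copied from tsparticles-demo-configs.
        const preset = sessionStorage.preset || "index";
        await tsParticles.loadJSON("tsparticles", `/configs/${preset}.json`);
      })();
    });

The practical effect is that every demo preset (and the 404 page, which now inlines its options instead of fetching 404.json) consumes the published config package rather than drifting local copies.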
build: update version (nightly.0)
92e940efeee199b1e0bbbc3c9eea7f3dc8221619
build
https://github.com/erg-lang/erg/commit/92e940efeee199b1e0bbbc3c9eea7f3dc8221619
update version (nightly.0)
{"Cargo.lock": "@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -126,7 +126,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -134,7 +134,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\n", "Cargo.toml": "@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n"}
feat: add `Kind::from_len_in_bytes()` const fn (#279)
ce673bfd9afee4a7872c6bcae1c39006b1747be7
feat
https://github.com/Byron/gitoxide/commit/ce673bfd9afee4a7872c6bcae1c39006b1747be7
add `Kind::from_len_in_bytes()` const fn (#279)
{"lib.rs": "@@ -100,4 +100,12 @@ impl Kind {\n Kind::Sha1 => 20,\n }\n }\n+\n+ /// Converts a size in bytes as obtained by `Kind::len_in_bytes()` into the corresponding hash kind, if possible.\n+ pub const fn from_len_in_bytes(bytes: usize) -> Option<Self> {\n+ Some(match bytes {\n+ 20 => Kind::Sha1,\n+ _ => return None,\n+ })\n+ }\n }\n"}
test: add passing test for #3540 Closes #3540
0ac88903b5a8753be05fd63a15a0ffe3ea8a660d
test
https://github.com/mikro-orm/mikro-orm/commit/0ac88903b5a8753be05fd63a15a0ffe3ea8a660d
add passing test for #3540 Closes #3540
{"GH3540.test.ts": "@@ -0,0 +1,104 @@\n+import { ArrayType, Entity, PrimaryKey, Property, SimpleLogger } from '@mikro-orm/core';\n+import { MikroORM } from '@mikro-orm/mysql';\n+import { mockLogger } from '../helpers';\n+\n+@Entity()\n+export class Foo {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property({ type: ArrayType, nullable: true })\n+ names!: string[];\n+\n+}\n+\n+let orm: MikroORM;\n+\n+beforeAll(async () => {\n+ orm = await MikroORM.init({\n+ entities: [Foo],\n+ dbName: `mikro_orm_test_3540`,\n+ type: 'mysql',\n+ port: 3308,\n+ loggerFactory: options => new SimpleLogger(options),\n+ });\n+ await orm.schema.refreshDatabase();\n+});\n+\n+beforeEach(async () => {\n+ await orm.schema.clearDatabase();\n+});\n+\n+afterAll(() => orm.close(true));\n+\n+test('GH issue 3540', async () => {\n+ const foo = new Foo();\n+ foo.id = 1;\n+ foo.names = [];\n+\n+ const mock = mockLogger(orm, ['query', 'query-params']);\n+ await orm.em.persistAndFlush(foo);\n+\n+ foo.names.push('1');\n+ await orm.em.flush();\n+\n+ foo.names.push('2', '3');\n+ await orm.em.flush();\n+\n+ foo.names = [];\n+ await orm.em.flush();\n+\n+ expect(mock.mock.calls).toEqual([\n+ ['[query] begin'],\n+ ['[query] insert into `foo` (`id`, `names`) values (1, \\'\\')'],\n+ ['[query] commit'],\n+ ['[query] begin'],\n+ ['[query] update `foo` set `names` = \\'1\\' where `id` = 1'],\n+ ['[query] commit'],\n+ ['[query] begin'],\n+ ['[query] update `foo` set `names` = \\'1,2,3\\' where `id` = 1'],\n+ ['[query] commit'],\n+ ['[query] begin'],\n+ ['[query] update `foo` set `names` = \\'\\' where `id` = 1'],\n+ ['[query] commit'],\n+ ]);\n+});\n+\n+test('GH issue 3540 batch update', async () => {\n+ const foos = [new Foo(), new Foo()];\n+ foos[0].id = 1;\n+ foos[0].names = [];\n+ foos[1].id = 2;\n+ foos[1].names = [];\n+\n+ const mock = mockLogger(orm, ['query', 'query-params']);\n+ await orm.em.persistAndFlush(foos);\n+\n+ foos[0].names.push('1');\n+ foos[1].names.push('1');\n+ await orm.em.flush();\n+\n+ foos[0].names.push('2', '3');\n+ foos[1].names.push('2', '3');\n+ await orm.em.flush();\n+\n+ foos[0].names = [];\n+ foos[1].names = [];\n+ await orm.em.flush();\n+\n+ expect(mock.mock.calls).toEqual([\n+ ['[query] begin'],\n+ ['[query] insert into `foo` (`id`, `names`) values (1, \\'\\'), (2, \\'\\')'],\n+ ['[query] commit'],\n+ ['[query] begin'],\n+ ['[query] update `foo` set `names` = case when (`id` = 1) then \\'1\\' when (`id` = 2) then \\'1\\' else `names` end where `id` in (1, 2)'],\n+ ['[query] commit'],\n+ ['[query] begin'],\n+ ['[query] update `foo` set `names` = case when (`id` = 1) then \\'1,2,3\\' when (`id` = 2) then \\'1,2,3\\' else `names` end where `id` in (1, 2)'],\n+ ['[query] commit'],\n+ ['[query] begin'],\n+ ['[query] update `foo` set `names` = case when (`id` = 1) then \\'\\' when (`id` = 2) then \\'\\' else `names` end where `id` in (1, 2)'],\n+ ['[query] commit'],\n+ ]);\n+});\n"}
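The GH3540 test above pins down how `ArrayType` tracks in-place mutations of a `string[]` property: the array is stored as a single joined string column, so pushing items marks the property dirty and the next flush issues an UPDATE with the re-joined value (an empty array round-trips as ''). A minimal TypeScript sketch of that mapping, with the entity shape taken from the test and `em` assumed to be an EntityManager obtained from an already-initialized MySQL MikroORM instance:

    import { ArrayType, Entity, PrimaryKey, Property } from '@mikro-orm/core';

    @Entity()
    class Foo {
      @PrimaryKey()
      id!: number;

      @Property({ type: ArrayType, nullable: true })
      names!: string[];
    }

    // `em` is assumed to come from an initialized MikroORM instance (see the test setup above).
    const foo = new Foo();
    foo.id = 1;
    foo.names = [];
    await em.persistAndFlush(foo); // insert into `foo` (`id`, `names`) values (1, '')

    foo.names.push('1');
    await em.flush();              // update `foo` set `names` = '1' where `id` = 1

    foo.names.push('2', '3');
    await em.flush();              // update `foo` set `names` = '1,2,3' where `id` = 1

The batch variant in the second test shows the same change set collapsing into a single CASE WHEN update when several entities of the same type are flushed together.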
docs: add v4.2
06fc60452ffde26101dbac3d9afab14fc5c2ecf7
docs
https://github.com/mikro-orm/mikro-orm/commit/06fc60452ffde26101dbac3d9afab14fc5c2ecf7
add v4.2
{"async-local-storage.md": "@@ -0,0 +1,19 @@\n+---\n+title: Using AsyncLocalStorage\n+---\n+\n+By default, `domain` api use used in the `RequestContext` helper. Since v4.0.3,\n+you can use the new `AsyncLocalStorage` too, if you are on up to date node version:\n+\n+```typescript\n+const storage = new AsyncLocalStorage<EntityManager>();\n+\n+const orm = await MikroORM.init({\n+ context: () => storage.getStore(),\n+ // ...\n+});\n+\n+app.use((req, res, next) => {\n+ storage.run(orm.em.fork(true, true), next);\n+});\n+```\n", "caching.md": "@@ -0,0 +1,44 @@\n+---\n+title: Result cache\n+---\n+\n+MikroORM have simple result caching mechanism. It works with those methods of \n+`EntityManager`: `find()`, `findOne()`, `findAndCount()`, `findOneOrFail()`,\n+`count()`, as well as with `QueryBuilder` result methods (including `execute`). \n+\n+By default, in memory cache is used, that is shared for the whole `MikroORM` \n+instance. Default expiration is 1 second.\n+\n+```ts\n+const res = await em.find(Book, { author: { name: 'Jon Snow' } }, {\n+ populate: ['author', 'tags'], \n+ cache: 50, // set expiration to 50ms\n+ // cache: ['cache-id', 50], // set custom cache id and expiration\n+ // cache: true, // use default cache id and expiration\n+});\n+```\n+\n+Or with query builder:\n+\n+```ts\n+const res = await em.createQueryBuilder(Book)\n+ .where({ author: { name: 'Jon Snow' } })\n+ .cache()\n+ .getResultList();\n+```\n+\n+We can change the default expiration as well as provide custom cache adapter in\n+the ORM configuration:\n+\n+```ts\n+const orm = await MikroORM.init({\n+ resultCache: {\n+ adapter: MemoryCacheAdapter,\n+ expiration: 1000, // 1s\n+ options: {},\n+ },\n+ // ...\n+});\n+```\n+\n+Custom cache adapters need to implement `CacheAdapter` interface. \n", "cascading.md": "@@ -0,0 +1,138 @@\n+---\n+title: Cascading persist, merge and remove\n+sidebar_label: Cascading\n+---\n+\n+> From v4.2, cascade merging is no longer configurable (and is kept enabled for all relations).\n+\n+When persisting or removing entity, all your references are by default cascade persisted. \n+This means that by persisting any entity, ORM will automatically persist all of its \n+associations. \n+\n+You can control this behaviour via `cascade` attribute of `@ManyToOne`, `@ManyToMany`, \n+`@OneToMany` and `@OneToOne` fields.\n+\n+> New entities without primary key will be always persisted, regardless of `cascade` value. 
\n+\n+```typescript\n+// cascade persist is default value\n+@OneToMany({ entity: () => Book, mappedBy: 'author' })\n+books = new Collection<Book>(this);\n+\n+// same as previous definition\n+@OneToMany({ entity: () => Book, mappedBy: 'author', cascade: [Cascade.PERSIST] })\n+books = new Collection<Book>(this);\n+\n+// only cascade remove\n+@OneToMany({ entity: () => Book, mappedBy: 'author', cascade: [Cascade.REMOVE] })\n+books = new Collection<Book>(this);\n+\n+// cascade persist and remove (same as `Cascade.ALL`)\n+@OneToMany({ entity: () => Book, mappedBy: 'author', cascade: [Cascade.PERSIST, Cascade.REMOVE] })\n+books = new Collection<Book>(this);\n+\n+// no cascade\n+@OneToMany({ entity: () => Book, mappedBy: 'author', cascade: [] })\n+books = new Collection<Book>(this);\n+\n+// cascade all (persist and remove)\n+@OneToMany({ entity: () => Book, mappedBy: 'author', cascade: [Cascade.ALL] })\n+books = new Collection<Book>(this);\n+\n+// same as previous definition\n+@OneToMany({ entity: () => Book, mappedBy: 'author', cascade: [Cascade.PERSIST, Cascade.REMOVE] })\n+books = new Collection<Book>(this);\n+```\n+\n+## Cascade persist\n+\n+Here is example of how cascade persist works:\n+\n+```typescript\n+const book = await orm.em.findOne(Book, 'id', ['author', 'tags']);\n+book.author.name = 'Foo Bar';\n+book.tags[0].name = 'new name 1';\n+book.tags[1].name = 'new name 2';\n+await orm.em.persistAndFlush(book); // all book tags and author will be persisted too\n+```\n+\n+> When cascade persisting collections, keep in mind only fully initialized collections \n+> will be cascade persisted.\n+\n+## Cascade remove\n+\n+Cascade remove works same way as cascade persist, just for removing entities. Following \n+example assumes that `Book.publisher` is set to `Cascade.REMOVE`:\n+\n+> Note that cascade remove for collections can be inefficient as it will fire 1 query\n+> for each entity in collection.\n+\n+```typescript\n+await orm.em.removeEntity(book); // this will also remove book.publisher\n+```\n+\n+Keep in mind that cascade remove **can be dangerous** when used on `@ManyToOne` fields, \n+as cascade removed entity can stay referenced in another entities that were not removed.\n+\n+```typescript\n+const publisher = new Publisher(...);\n+// all books with same publisher\n+book1.publisher = book2.publisher = book3.publisher = publisher;\n+await orm.em.removeEntity(book1); // this will remove book1 and its publisher\n+\n+// but we still have reference to removed publisher here\n+console.log(book2.publisher, book3.publisher);\n+```\n+\n+## Orphan removal\n+\n+In addition to `Cascade.REMOVE`, there is also additional and more aggressive remove \n+cascading mode which can be specified using the `orphanRemoval` flag of the `@OneToOne`\n+and `@OneToMany` properties:\n+\n+```typescript\n+@Entity()\n+export class Author {\n+\n+ @OneToMany({ entity: () => Book, mappedBy: 'author', orphanRemoval: true })\n+ books = new Collection<Book>(this);\n+\n+}\n+```\n+\n+> `orphanRemoval` flag behaves just like `Cascade.REMOVE` for remove operation, so specifying \n+> both is redundant.\n+\n+With simple `Cascade.REMOVE`, you would need to remove the `Author` entity to cascade \n+the operation down to all loaded `Book`s. 
By enabling orphan removal on the collection, \n+`Book`s will also be removed when they get disconnected from the collection (either via \n+`remove()`, or by replacing collection items via `set()`):\n+\n+```typescript\n+await author.books.set([book1, book2]); // replace whole collection\n+await author.books.remove(book1); // remove book from collection\n+await orm.em.persistAndFlush(author); // book1 will be removed, as well as all original items (before we called `set()`)\n+```\n+\n+In this example, no `Book` would be removed with simple `Cascade.REMOVE` as no remove operation\n+was executed. \n+\n+## Declarative Referential Integrity\n+\n+> This is only supported in SQL drivers.\n+\n+As opposed to the application level cascading controlled by the `cascade` option, we can\n+also define database level referential integrity actions: `on update` and `on delete`.\n+\n+Their values are automatically inferred from the `cascade` option value. You can also \n+control the value manually via the `onUpdateIntegrity` and `onDelete` options. \n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @ManyToOne({ onUpdateIntegrity: 'set null', onDelete: 'cascade' })\n+ author?: Author;\n+\n+}\n+```\n", "collections.md": "@@ -0,0 +1,192 @@\n+---\n+title: Collections\n+---\n+\n+`OneToMany` and `ManyToMany` collections are stored in a `Collection` wrapper. It implements an\n+iterator so you can use a `for of` loop to iterate through it. \n+\n+Another way to access collection items is to use bracket syntax, like when you access array items.\n+Keep in mind that this approach will not check if the collection is initialized, while using the `get`\n+method will throw an error in this case.\n+\n+> Note that array access in `Collection` is available only for reading already loaded items, you \n+> cannot add new items to `Collection` this way. 
\n+\n+```typescript\n+const author = orm.em.findOne(Author, '...', ['books']); // populating books collection\n+\n+// or we could lazy load books collection later via `init()` method\n+await author.books.init();\n+\n+for (const book of author.books) {\n+ console.log(book.title); // initialized\n+ console.log(book.author.isInitialized()); // true\n+ console.log(book.author.id);\n+ console.log(book.author.name); // Jon Snow\n+ console.log(book.publisher); // just reference\n+ console.log(book.publisher.isInitialized()); // false\n+ console.log(book.publisher.id);\n+ console.log(book.publisher.name); // undefined\n+}\n+\n+// collection needs to be initialized before you can work with it\n+author.books.add(book);\n+console.log(author.books.contains(book)); // true\n+author.books.remove(book);\n+console.log(author.books.contains(book)); // false\n+author.books.add(book);\n+console.log(author.books.count()); // 1\n+author.books.removeAll();\n+console.log(author.books.contains(book)); // false\n+console.log(author.books.count()); // 0\n+console.log(author.books.getItems()); // Book[]\n+console.log(author.books.getIdentifiers()); // array of string | number\n+console.log(author.books.getIdentifiers('_id')); // array of ObjectId\n+\n+// array access works as well\n+console.log(author.books[1]); // Book\n+console.log(author.books[12345]); // undefined, even if the collection is not initialized\n+\n+const author = orm.em.findOne(Author, '...'); // books collection has not been populated\n+console.log(author.books.getItems()); // throws because the collection has not been initialized\n+// initialize collection if not already loaded and return its items as array\n+console.log(await author.books.loadItems()); // Book[]\n+```\n+\n+## OneToMany Collections\n+\n+`OneToMany` collections are inverse side of `ManyToOne` references, to which they need to point via `fk` attribute:\n+ \n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ _id!: ObjectId;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+}\n+\n+@Entity()\n+export class Author {\n+\n+ @PrimaryKey()\n+ _id!: ObjectId;\n+\n+ @OneToMany(() => Book, book => book.author)\n+ books1 = new Collection<Book>(this);\n+\n+ // or via options object\n+ @OneToMany({ entity: () => Book, mappedBy: 'author' })\n+ books2 = new Collection<Book>(this);\n+\n+}\n+```\n+\n+## ManyToMany Collections\n+\n+For ManyToMany, SQL drivers use pivot table that holds reference to both entities. \n+\n+As opposed to them, with MongoDB we do not need to have join tables for `ManyToMany` \n+relations. All references are stored as an array of `ObjectId`s on owning entity. 
\n+\n+### Unidirectional\n+\n+Unidirectional `ManyToMany` relations are defined only on one side; if you define only the `entity`\n+attribute, it will be considered the owning side:\n+\n+```typescript\n+@ManyToMany(() => Book)\n+books1 = new Collection<Book>(this);\n+\n+// or mark it as owner explicitly via options object\n+@ManyToMany({ entity: () => Book, owner: true })\n+books2 = new Collection<Book>(this);\n+```\n+\n+### Bidirectional\n+\n+Bidirectional `ManyToMany` relations are defined on both sides, while one is the owning side (where references are stored), \n+marked by the `inversedBy` attribute pointing to the inverse side:\n+\n+```typescript\n+@ManyToMany(() => BookTag, tag => tag.books, { owner: true })\n+tags = new Collection<BookTag>(this);\n+\n+// or via options object\n+@ManyToMany({ entity: () => BookTag, inversedBy: 'books' })\n+tags = new Collection<BookTag>(this);\n+```\n+\n+And on the inverse side we define it with the `mappedBy` attribute pointing back to the owner:\n+\n+```typescript\n+@ManyToMany(() => Book, book => book.tags)\n+books = new Collection<Book>(this);\n+\n+// or via options object\n+@ManyToMany({ entity: () => Book, mappedBy: 'tags' })\n+books = new Collection<Book>(this);\n+```\n+\n+### Forcing fixed order of collection items\n+\n+> Since v3, many to many collections do not require an auto increment primary key, which \n+> was previously used to ensure fixed order of collection items.\n+\n+To preserve fixed order of collections, you can use the `fixedOrder: true` attribute, which will \n+start ordering by the `id` column. The schema generator will convert the pivot table to have an auto increment\n+primary key `id`. You can also change the order column name via `fixedOrderColumn: 'order'`. \n+\n+You can also specify default ordering via the `orderBy: { ... }` attribute. This will be used when\n+you fully populate the collection including its items, as it orders by the referenced entity \n+properties instead of pivot table columns (which `fixedOrderColumn` is). On the other hand, \n+`fixedOrder` is used to maintain the insert order of items instead of ordering by some property. \n+\n+## Propagation of Collection's add() and remove() operations\n+\n+When you use the `Collection.add()` method, the item is added to the given collection, \n+and this action is also propagated to its counterpart. \n+\n+```typescript\n+// one to many\n+const author = new Author(...);\n+const book = new Book(...);\n+\n+author.books.add(book);\n+console.log(book.author); // author will be set thanks to the propagation\n+```\n+\n+For M:N this works both ways, either from the owning side or from the inverse side. 
\n+\n+```typescript\n+// many to many works both from owning side and from inverse side\n+const book = new Book(...);\n+const tag = new BookTag(...);\n+\n+book.tags.add(tag);\n+console.log(tag.books.contains(book)); // true\n+\n+tag.books.add(book);\n+console.log(book.tags.contains(tag)); // true\n+``` \n+\n+> Collections on both sides have to be initialized, otherwise propagation won't work.\n+\n+> Although this propagation works also for M:N inverse side, you should always use owning\n+> side to manipulate the collection.\n+\n+Same applies for `Collection.remove()`.\n+\n+## Filtering and ordering of collection items\n+\n+When initializing collection items via `collection.init()`, you can filter the collection\n+as well as order its items:\n+\n+```typescript\n+await book.tags.init({ where: { active: true }, orderBy: { name: QueryOrder.DESC } });\n+```\n+\n+> You should never modify partially loaded collection.\n", "composite-keys.md": "@@ -0,0 +1,264 @@\n+---\n+title: Composite and Foreign Keys as Primary Key\n+sidebar_label: Composite Primary Keys\n+---\n+\n+> Support for composite keys was added in version 3.5\n+\n+MikroORM supports composite primary keys natively. Composite keys are a very powerful \n+relational database concept and we took good care to make sure MikroORM supports as \n+many of the composite primary key use-cases. MikroORM supports composite keys of primitive \n+data-types as well as foreign keys as primary keys. You can also use your composite key \n+entities in relationships. \n+\n+This section shows how the semantics of composite primary keys work and how they map \n+to the database.\n+\n+## General Considerations\n+\n+ID fields have to have their values set before you call `em.persist(entity)`.\n+\n+## Primitive Types only\n+\n+Suppose you want to create a database of cars and use the model-name and year of \n+production as primary keys:\n+\n+```typescript\n+@Entity()\n+export class Car {\n+\n+ @PrimaryKey()\n+ name: string;\n+\n+ @PrimaryKey()\n+ year: number;\n+\n+ [PrimaryKeyType]: [string, number]; // this is needed for proper type checks in `FilterQuery`\n+\n+ constructor(name: string, year: number) {\n+ this.name = name;\n+ this.year = year;\n+ }\n+\n+}\n+```\n+\n+Now you can use this entity:\n+\n+```typescript\n+const car = new Car('Audi A8', 2010);\n+await em.persistAndFlush(car);\n+```\n+\n+And for querying you need to provide all primary keys in the condition or an array of\n+primary keys in the same order as the keys were defined:\n+\n+```typescript\n+const audi1 = await em.findOneOrFail(Car, { name: 'Audi A8', year: 2010 });\n+const audi2 = await em.findOneOrFail(Car, ['Audi A8', 2010]);\n+```\n+\n+> If we want to use the second approach with primary key tuple, we will need to specify \n+> the type of entity's primary key via `PrimaryKeyType` symbol as shown in the `Car` entity.\n+\n+> `PrimaryKeyType` is not needed when your entity has single scalar primary key under \n+> one of following property names: `id: number | string | bigint`, `_id: any` or \n+> `uuid: string`.\n+\n+You can also use this entity in associations. 
MikroORM will then generate two foreign \n+keys, one for name and one for year, in the related entities.\n+\n+This example shows how you can nicely solve the requirement for existing values before \n+`em.persist()`: by adding them as mandatory constructor parameters.\n+\n+## Identity through foreign Entities\n+\n+There are tons of use-cases where the identity of an Entity should be determined by \n+the entity of one or many parent entities.\n+\n+- Dynamic Attributes of an Entity (for example `Article`). Each Article has many \n+ attributes with primary key `article_id` and `attribute_name`.\n+- `Address` object of a `Person`, the primary key of the address is `user_id`. This \n+ is not a case of a composite primary key, but the identity is derived through a \n+ foreign entity and a foreign key.\n+- Pivot Tables with metadata can be modelled as Entity, for example connections between \n+ two articles with a little description and a score.\n+\n+The semantics of mapping identity through foreign entities are easy:\n+\n+- Only allowed on `@ManyToOne` or `@OneToOne` associations.\n+- Use `primary: true` in the decorator.\n+\n+## Use-Case 1: Dynamic Attributes\n+\n+We keep the example of an Article with arbitrary attributes; the mapping looks like this:\n+\n+```typescript\n+@Entity()\n+export class Article {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property()\n+ title!: string;\n+\n+ @OneToMany(() => ArticleAttribute, attr => attr.article, { cascade: [Cascade.ALL] })\n+ attributes = new Collection<ArticleAttribute>(this);\n+\n+}\n+\n+@Entity()\n+export class ArticleAttribute {\n+\n+ @ManyToOne({ primary: true })\n+ article: Article;\n+\n+ @PrimaryKey()\n+ attribute: string;\n+\n+ @Property()\n+ value!: string;\n+\n+ [PrimaryKeyType]: [number, string]; // this is needed for proper type checks in `FilterQuery`\n+\n+ constructor(name: string, value: string, article: Article) {\n+ this.attribute = name;\n+ this.value = value;\n+ this.article = article;\n+ }\n+\n+}\n+```\n+\n+## Use-Case 2: Simple Derived Identity\n+\n+Sometimes you have the requirement that two objects are related by a `@OneToOne` \n+association and that the dependent class should re-use the primary key of the class \n+it depends on. 
One good example for this is a user-address relationship:\n+\n+```typescript\n+@Entity()\n+export class User {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @OneToOne(() => Address, address => address.user, { cascade: [Cascade.ALL] })\n+ address?: Address; // virtual property (inverse side) to allow querying the relation\n+\n+}\n+\n+@Entity()\n+export class Address {\n+\n+ @OneToOne({ primary: true })\n+ user!: User;\n+\n+ [PrimaryKeyType]: number; // this is needed for proper type checks in `FilterQuery`\n+\n+}\n+```\n+\n+## Use-Case 3: Join-Table with Metadata\n+\n+In the classic order product shop example there is the concept of the order item which \n+contains references to order and product and additional data such as the amount of products \n+purchased and maybe even the current price.\n+\n+```typescript\n+@Entity()\n+export class Order {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @ManyToOne()\n+ customer: Customer;\n+\n+ @OneToMany(() => OrderItem, item => item.order)\n+ items = new Collection<OrderItem>(this);\n+\n+ @Property()\n+ paid = false;\n+\n+ @Property()\n+ shipped = false;\n+\n+ @Property()\n+ created = new Date();\n+\n+ constructor(customer: Customer) {\n+ this.customer = customer;\n+ }\n+\n+}\n+\n+@Entity()\n+export class Product {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property()\n+ name!: string;\n+\n+ @Property()\n+ currentPrice!: number;\n+\n+}\n+\n+@Entity()\n+export class OrderItem {\n+\n+ @ManyToOne({ primary: true })\n+ order: Order;\n+\n+ @ManyToOne({ primary: true })\n+ product: Product;\n+\n+ @Property()\n+ amount = 1;\n+\n+ @Property()\n+ offeredPrice: number;\n+\n+ [PrimaryKeyType]: [number, number]; // this is needed for proper type checks in `FilterQuery`\n+\n+ constructor(order: Order, product: Product, amount = 1) {\n+ this.order = order;\n+ this.product = product;\n+ this.offeredPrice = product.currentPrice;\n+ }\n+\n+}\n+```\n+\n+## Using QueryBuilder with composite keys\n+\n+Internally composite keys are represented as tuples, containing all the values in the\n+same order as the primary keys were defined. \n+\n+```typescript\n+const qb1 = em.createQueryBuilder(CarOwner);\n+qb1.select('*').where({ car: { name: 'Audi A8', year: 2010 } });\n+console.log(qb1.getQuery()); // select `e0`.* from `car_owner` as `e0` where `e0`.`name` = ? and `e0`.`year` = ?\n+\n+const qb2 = em.createQueryBuilder(CarOwner);\n+qb2.select('*').where({ car: ['Audi A8', 2010] });\n+console.log(qb2.getQuery()); // 'select `e0`.* from `car_owner` as `e0` where (`e0`.`car_name`, `e0`.`car_year`) = (?, ?)'\n+\n+const qb3 = em.createQueryBuilder(CarOwner);\n+qb3.select('*').where({ car: [['Audi A8', 2010]] });\n+console.log(qb3.getQuery()); // 'select `e0`.* from `car_owner` as `e0` where (`e0`.`car_name`, `e0`.`car_year`) in ((?, ?))'\n+```\n+\n+This also applies when you want to get a reference to entity with composite key:\n+\n+```typescript\n+const ref = em.getReference(Car, ['Audi A8', 2010]);\n+console.log(ref instanceof Car); // true\n+``` \n+\n+> This part of documentation is highly inspired by [doctrine tutorial](https://www.doctrine-project.org/projects/doctrine-orm/en/latest/tutorials/composite-primary-keys.html)\n+> as the behaviour here is pretty much the same.\n", "configuration.md": "@@ -0,0 +1,349 @@\n+---\n+title: Configuration\n+---\n+\n+## Entity Discovery\n+\n+You can either provide array of entity instances via `entities`, or let the ORM look up your \n+entities in selected folders. 
\n+\n+```typescript\n+MikroORM.init({\n+ entities: [Author, Book, Publisher, BookTag],\n+});\n+```\n+\n+We can also use folder based discovery by providing list of paths to the entities\n+we want to discover (globs are supported as well). This way we also need to specify\n+`entitiesTs`, where we point the paths to the TS source files instead of the JS \n+compiled files (see more at [Metadata Providers](metadata-providers.md)).\n+\n+> The `entitiesTs` option is used when running the app via `ts-node`, as the ORM \n+> needs to discover the TS files. Always specify this option if you use folder/file\n+> based discovery. \n+\n+```typescript\n+MikroORM.init({\n+ entities: ['./dist/modules/users/entities', './dist/modules/projects/entities'],\n+ entitiesTs: ['./src/modules/users/entities', './src/modules/projects/entities'],\n+ // optionally you can override the base directory (defaults to `process.cwd()`)\n+ baseDir: process.cwd(),\n+});\n+```\n+\n+> Be careful when overriding the `baseDir` with dynamic values like `__dirname`, \n+> as you can end up with valid paths from `ts-node`, but invalid paths from `node`.\n+> Ideally you should keep the default of `process.cwd()` there to always have the \n+> same base path regardless of how you run the app.\n+\n+By default, `ReflectMetadataProvider` is used that leverages the `reflect-metadata`. \n+You can also use `TsMorphMetadataProvider` by installing `@mikro-orm/reflection`. \n+This provider will analyse your entity source files (or `.d.ts` type definition files). \n+If you aim to use plain JavaScript instead of TypeScript, use `EntitySchema` or \n+the `JavaScriptMetadataProvider`.\n+\n+> You can also implement your own metadata provider and use it instead. To do so, extend the \n+> `MetadataProvider` class.\n+\n+```typescript\n+import { MikroORM } from '@mikro-orm/core';\n+import { TsMorphMetadataProvider } from '@mikro-orm/reflection';\n+\n+MikroORM.init({\n+ metadataProvider: TsMorphMetadataProvider,\n+});\n+```\n+\n+There are also some additional options how you can adjust the discovery process:\n+\n+```typescript\n+MikroORM.init({\n+ discovery: {\n+ warnWhenNoEntities: false, // by default, discovery throws when no entity is processed\n+ requireEntitiesArray: true, // force usage of class refrences in `entities` instead of paths\n+ alwaysAnalyseProperties: false, // do not analyse properties when not needed (with ts-morph)\n+ },\n+});\n+```\n+\n+> If you disable `discovery.alwaysAnalyseProperties` option, you will need to explicitly \n+> provide `nullable` and `wrappedReference` parameters (where applicable).\n+\n+Read more about this in [Metadata Providers](metadata-providers.md) sections.\n+\n+## Driver\n+\n+To select driver, you can either use `type` option, or provide the driver class reference.\n+\n+| type | driver name | dependency | note |\n+|------|-------------|------------|------|\n+| `mongo` | `MongoDriver` | `mongodb^3.3.4` | - |\n+| `mysql` | `MySqlDriver` | `mysql2^2.0.0` | compatible with MariaDB |\n+| `mariadb` | `MariaDbDriver` | `mariadb^2.0.0` | compatible with MySQL |\n+| `postgresql` | `PostgreSqlDriver` | `pg^7.0.0` | - |\n+| `sqlite` | `SqliteDriver` | `sqlite3^4.0.0` | - |\n+\n+> Driver and connection implementations are not directly exported from `@mikro-orm/core` module. \n+> You can import them from the driver packages (e.g. `import { PostgreSqlDriver } from '@mikro-orm/postgresql'`).\n+\n+> You can pass additional options to the underlying driver (e.g. `mysql2`) via `driverOptions`. 
\n+> The object will be deeply merged, overriding all internally used options.\n+\n+```typescript\n+import { MySqlDriver } from '@mikro-orm/mysql';\n+\n+MikroORM.init({\n+ driver: MySqlDriver,\n+ driverOptions: { connection: { timezone: '+02:00' } },\n+});\n+```\n+\n+> From v3.5.1 you can also set the timezone directly in the ORM configuration:\n+>\n+> ```typescript\n+> MikroORM.init({\n+> type: 'mysql',\n+> timezone: '+02:00',\n+> });\n+> ```\n+\n+## Connection\n+\n+Each platform (driver) provides default connection string, you can override it as a whole\n+through `clientUrl`, or partially through one of following options:\n+\n+```typescript\n+export interface ConnectionOptions {\n+ dbName?: string;\n+ name?: string; // for logging only (when replicas are used)\n+ clientUrl?: string;\n+ host?: string;\n+ port?: number;\n+ user?: string;\n+ password?: string;\n+ charset?: string;\n+ multipleStatements?: boolean; // for mysql driver\n+ pool?: PoolConfig; // provided by `knex`\n+}\n+```\n+\n+Following table shows default client connection strings:\n+\n+| type | default connection url |\n+|------|------------------------|\n+| `mongo` | `mongodb://127.0.0.1:27017` |\n+| `mysql` | `mysql://[email protected]:3306` |\n+| `mariadb` | `mysql://[email protected]:3306` |\n+| `postgresql` | `postgresql://[email protected]:5432` |\n+\n+To set up read replicas, you can use `replicas` option. You can provide only those parts of the \n+`ConnectionOptions` interface, they will be used to override the `master` connection options.\n+\n+```typescript\n+MikroORM.init({\n+ type: 'mysql',\n+ dbName: 'my_db_name',\n+ user: 'write-user',\n+ host: 'master.db.example.com',\n+ port: 3306,\n+ replicas: [\n+ { user: 'read-user-1', host: 'read-1.db.example.com', port: 3307 },\n+ { user: 'read-user-2', host: 'read-2.db.example.com', port: 3308 },\n+ { user: 'read-user-3', host: 'read-3.db.example.com', port: 3309 },\n+ ],\n+});\n+```\n+\n+Read more about this in [Installation](installation.md) and [Read Connections](read-connections.md) sections.\n+\n+## Naming Strategy\n+\n+When mapping your entities to database tables and columns, their names will be defined by naming \n+strategy. There are 3 basic naming strategies you can choose from:\n+\n+- `UnderscoreNamingStrategy` - default of all SQL drivers\n+- `MongoNamingStrategy` - default of `MongoDriver`\n+- `EntityCaseNamingStrategy` - uses unchanged entity and property names\n+\n+> You can also define your own custom `NamingStrategy` implementation.\n+\n+```typescript\n+MikroORM.init({\n+ namingStrategy: EntityCaseNamingStrategy,\n+});\n+```\n+\n+Read more about this in [Naming Strategy](naming-strategy.md) section.\n+\n+## Auto-join of 1:1 owners\n+\n+By default, owning side of 1:1 relation will be auto-joined when you select the inverse side \n+so we can have the reference to it. You can disable this behaviour via `autoJoinOneToOneOwner` \n+configuration toggle.\n+\n+```typescript\n+MikroORM.init({\n+ autoJoinOneToOneOwner: false,\n+});\n+```\n+\n+## Propagation of 1:1 and m:1 owners\n+\n+MikroORM defines getter and setter for every owning side of m:1 and 1:1 relation. 
This is \n+then used for propagation of changes to the inverse side of bi-directional relations.\n+\n+```typescript\n+const author = new Author('n', 'e');\n+const book = new Book('t');\n+book.author = author;\n+console.log(author.books.contains(book)); // true\n+```\n+\n+You can disable this behaviour via `propagateToOneOwner` option.\n+\n+```typescript\n+MikroORM.init({\n+ propagateToOneOwner: false,\n+});\n+```\n+\n+## Forcing UTC Timezone\n+\n+Use `forceUtcTimezone` option to force the `Date`s to be saved in UTC in datetime columns \n+without timezone. It works for MySQL (`datetime` type) and PostgreSQL (`timestamp` type). \n+SQLite does this by default. \n+\n+```typescript\n+MikroORM.init({\n+ forceUtcTimezone: true,\n+});\n+```\n+\n+## Custom Hydrator\n+\n+Hydrator is responsible for assigning values from the database to entities. \n+You can implement your custom `Hydrator` (by extending the abstract `Hydrator` class):\n+\n+```typescript\n+MikroORM.init({\n+ hydrator: MyCustomHydrator,\n+});\n+```\n+\n+## Custom Repository\n+\n+You can also register custom base repository (for all entities where you do not specify \n+`customRepository`) globally:\n+\n+> You can still use entity specific repositories in combination with global base repository.\n+\n+```typescript\n+MikroORM.init({\n+ entityRepository: CustomBaseRepository,\n+});\n+```\n+\n+Read more about this in [Repositories](repositories.md) section.\n+\n+## Strict Mode and property validation\n+\n+> Since v4.0.3 the validation needs to be explicitly enabled via `validate: true`.\n+> It has performance implications and usually should not be needed, as long as\n+> you don't modify your entities via `Object.assign()`.\n+\n+`MirkoORM` will validate your properties before actual persisting happens. It will try to fix wrong \n+data types for you automatically. If automatic conversion fails, it will throw an error. You can \n+enable strict mode to disable this feature and let ORM throw errors instead. Validation is triggered \n+when persisting the entity. \n+\n+```typescript\n+MikroORM.init({\n+ validate: true,\n+ strict: true,\n+});\n+```\n+\n+Read more about this in [Property Validation](property-validation.md) section.\n+\n+## Debugging & Logging\n+\n+You can enable logging with `debug` option. Either set it to `true` to log everything, or \n+provide array of `'query' | 'query-params' | 'discovery' | 'info'` namespaces.\n+\n+```typescript\n+MikroORM.init({\n+ logger: (message: string) => myLogger.info(message), // defaults to `console.log()`\n+ debug: true, // or provide array like `['query', 'query-params']`\n+ highlight: false, // defaults to true\n+ highlightTheme: { ... }, // you can also provide custom highlight there\n+});\n+```\n+\n+Read more about this in [Debugging](debugging.md) section.\n+\n+## Custom Fail Handler\n+\n+When no entity is found during `em.findOneOrFail()` call, `new Error()` will be thrown. 
\n+You can customize how the `Error` instance is created via `findOneOrFailHandler`:\n+\n+```typescript\n+MikroORM.init({\n+ findOneOrFailHandler: (entityName: string, where: Dictionary | IPrimaryKey) => {\n+ return new NotFoundException(`${entityName} not found!`);\n+ },\n+});\n+```\n+\n+Read more about this in the [Entity Manager](entity-manager.md#handling-not-found-entities) docs.\n+\n+## Migrations\n+\n+Under the `migrations` namespace, you can adjust how the integrated migrations support works.\n+The following example shows all possible options and their defaults:\n+\n+```typescript\n+MikroORM.init({\n+ migrations: {\n+ tableName: 'mikro_orm_migrations', // migrations table name\n+ path: process.cwd() + '/migrations', // path to folder with migration files\n+ pattern: /^[\\w-]+\\d+\\.ts$/, // how to match migration files\n+ transactional: true, // run each migration inside transaction\n+ disableForeignKeys: true, // try to disable foreign_key_checks (or equivalent)\n+ allOrNothing: true, // run all migrations in current batch in master transaction\n+ emit: 'ts', // migration generation mode\n+ },\n+});\n+```\n+\n+Read more about this in the [Migrations](migrations.md) section.\n+\n+## Caching\n+\n+By default, metadata discovery results are cached. You can either disable caching, or adjust \n+how it works. The following example shows all possible options and their defaults:\n+\n+```typescript\n+MikroORM.init({\n+ cache: {\n+ enabled: true,\n+ pretty: false, // allows to pretty print the JSON cache\n+ adapter: FileCacheAdapter, // you can provide your own implementation here, e.g. with redis\n+ options: { cacheDir: process.cwd() + '/temp' }, // options will be passed to the constructor of `adapter` class\n+ },\n+});\n+```\n+\n+Read more about this in the [Metadata Cache](metadata-cache.md) section.\n+\n+## Importing database dump files (MySQL and Postgres)\n+\n+Using the `mikro-orm database:import db-file.sql` command you can import a database dump file. This can be useful when kickstarting an application or could be used in tests to reset the database. 
Database dumps often have queries spread over multiple lines and therefore you need the following configuration.\n+\n+```typescript\n+MikroORM.init({\n+ ...\n+ multipleStatements: true,\n+ ...\n+});\n+```\n+ > This should be disabled in production environments for added security.\n", "custom-driver.md": "@@ -0,0 +1,108 @@\n+---\n+title: Creating Custom Driver\n+---\n+\n+If you want to use database that is not currently supported, you can implement your own driver.\n+To do so, you will need to design 4 classes:\n+\n+## Platform\n+\n+Platform is a class that provides information about available features of given driver: \n+\n+```typescript\n+import { Platform } from '@mikro-orm/core';\n+\n+export class MyCustomPlatform extends Platform {\n+ \n+ protected abstract schemaHelper: MyCustomSchemaHelper;\n+\n+ // here you can override default settings\n+ usesPivotTable(): boolean;\n+ supportsTransactions(): boolean;\n+ supportsSavePoints(): boolean;\n+ getNamingStrategy(): { new (): NamingStrategy; };\n+ getIdentifierQuoteCharacter(): string;\n+ getParameterPlaceholder(index?: number): string;\n+ usesReturningStatement(): boolean;\n+ normalizePrimaryKey<T = number | string>(data: IPrimaryKey): T;\n+ denormalizePrimaryKey(data: IPrimaryKey): IPrimaryKey;\n+ getSerializedPrimaryKeyField(field: string): string;\n+\n+}\n+```\n+\n+## SchemaHelper\n+\n+Part of platform is a `SchemaHelper`, that provides information about how to build schema.\n+\n+```typescript\n+import { SchemaHelper } from '@mikro-orm/core';\n+\n+export class MyCustomSchemaHelper extends SchemaHelper {\n+ \n+ // here you can override default settings\n+ getIdentifierQuoteCharacter(): string;\n+ getSchemaBeginning(): string;\n+ getSchemaEnd(): string;\n+ getSchemaTableEnd(): string;\n+ getAutoIncrementStatement(meta: EntityMetadata): string;\n+ getPrimaryKeySubtype(meta: EntityMetadata): string;\n+ getTypeDefinition(prop: EntityProperty, types?: Record<string, string>, lengths?: Record<string, number>): string;\n+ getUnsignedSuffix(prop: EntityProperty): string;\n+ supportsSchemaConstraints(): boolean;\n+ supportsSchemaMultiAlter(): boolean;\n+ supportsSequences(): boolean;\n+ quoteIdentifier(field: string): string;\n+ dropTable(meta: EntityMetadata): string;\n+ indexForeignKeys(): boolean;\n+\n+}\n+```\n+\n+## Connection\n+\n+Next part is connection wrapper, that will be responsible for querying the database:\n+\n+```typescript\n+import { Connection } from '@mikro-orm/core';\n+\n+export class MyCustomConnection extends Connection {\n+ \n+ // implement abstract methods\n+ connect(): Promise<void>;\n+ isConnected(): Promise<boolean>;\n+ close(force?: boolean): Promise<void>;\n+ getDefaultClientUrl(): string;\n+ execute(query: string, params?: any[], method?: 'all' | 'get' | 'run'): Promise<QueryResult | any | any[]>;\n+\n+}\n+```\n+\n+## Driver\n+\n+Last part is driver, that is responsible for using the connection to persist changes to \n+database. If you are building SQL driver, it might be handy to extend `AbstractSqlDriver`, \n+if not, extend `DatabaseDriver` abstract class. \n+\n+If you want to have absolute control, you can implement the whole driver yourself via\n+`IDatabaseDriver` interface. 
\n+\n+```typescript\n+import { DatabaseDriver } from '@mikro-orm/core';\n+\n+export class MyCustomSchemaHelper extends DatabaseDriver {\n+\n+ // initialize connection and platform\n+ protected readonly connection = new MyCustomConnection(this.config);\n+ protected readonly platform = new MyCustomPlatform;\n+\n+ // and implement abstract methods\n+ find<T extends AnyEntity>(entityName: string, where: FilterQuery<T>, populate?: string[], orderBy?: Record<string, QueryOrder>, limit?: number, offset?: number): Promise<T[]>;\n+ findOne<T extends AnyEntity>(entityName: string, where: FilterQuery<T> | string, populate: string[]): Promise<T | null>;\n+ nativeInsert<T extends AnyEntityType<T>>(entityName: string, data: EntityData<T>): Promise<QueryResult>;\n+ nativeUpdate<T extends AnyEntity>(entityName: string, where: FilterQuery<T> | IPrimaryKey, data: EntityData<T>): Promise<QueryResult>;\n+ nativeDelete<T extends AnyEntity>(entityName: string, where: FilterQuery<T> | IPrimaryKey): Promise<QueryResult>;\n+ count<T extends AnyEntity>(entityName: string, where: FilterQuery<T>): Promise<number>;\n+\n+}\n+```\n", "custom-types.md": "@@ -0,0 +1,167 @@\n+---\n+title: Custom Types\n+---\n+\n+You can define custom types by extending `Type` abstract class. It has 4 optional methods:\n+\n+- `convertToDatabaseValue(value: any, platform: Platform): any`\n+\n+ Converts a value from its JS representation to its database representation of this type.\n+ By default returns unchanged `value`.\n+\n+- `convertToJSValue(value: any, platform: Platform): any`\n+\n+ Converts a value from its database representation to its JS representation of this type.\n+ By default returns unchanged `value`.\n+\n+- `toJSON(value: any, platform: Platform): any`\n+\n+ Converts a value from its JS representation to its serialized JSON form of this type.\n+ By default uses the runtime value.\n+ \n+- `getColumnType(prop: EntityProperty, platform: Platform): string`\n+\n+ Gets the SQL declaration snippet for a field of this type.\n+ By default returns `columnType` of given property.\n+\n+```typescript\n+import { Type, Platform, EntityProperty, ValidationError } from '@mikro-orm/core';\n+\n+export class DateType extends Type<Date, string> {\n+\n+ convertToDatabaseValue(value: Date | string | undefined, platform: Platform): string {\n+ if (value instanceof Date) {\n+ return value.toISOString().substr(0, 10);\n+ }\n+\n+ if (!value || value.toString().match(/^\\d{4}-\\d{2}-\\d{2}$/)) {\n+ return value as string;\n+ }\n+\n+ throw ValidationError.invalidType(DateType, value, 'JS');\n+ }\n+\n+ convertToJSValue(value: Date | string | undefined, platform: Platform): Date {\n+ if (!value || value instanceof Date) {\n+ return value as Date;\n+ }\n+\n+ const date = new Date(value);\n+\n+ if (date.toString() === 'Invalid Date') {\n+ throw ValidationError.invalidType(DateType, value, 'database');\n+ }\n+\n+ return date;\n+ }\n+\n+ getColumnType(prop: EntityProperty, platform: Platform) {\n+ return `date(${prop.length})`;\n+ }\n+\n+}\n+```\n+\n+Then you can use this type when defining your entity properties:\n+\n+```typescript\n+@Entity()\n+export class FooBar {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property()\n+ name!: string;\n+\n+ @Property({ type: DateType, length: 3 })\n+ born?: Date;\n+\n+}\n+```\n+\n+## Types provided by MikroORM\n+\n+There are few types provided by MikroORM. 
All of them aim to provide similar\n+experience among all the drivers, even if the particular feature is not supported\n+out of box by the driver.\n+\n+### ArrayType\n+\n+In PostgreSQL and MongoDB, it uses native arrays, otherwise it concatenates the \n+values into string separated by commas. This means that you can't use values that\n+contain comma with the `ArrayType` (but you can create custom array type that will\n+handle this case, e.g. by using different separator).\n+\n+By default array of strings is returned from the type. You can also have arrays \n+of numbers or other data types - to do so, you will need to implement custom \n+`hydrate` method that is used for converting the array values to the right type.\n+\n+> `ArrayType` will be used automatically if `type` is set to `array` (default behaviour\n+> of reflect-metadata) or `string[]` or `number[]` (either manually or via ts-morph).\n+> In case of `number[]` it will automatically handle the conversion to numbers. \n+> This means that the following examples would both have the `ArrayType` used\n+> automatically (but with reflect-metadata we would have a string array for both\n+> unless we specify the type manually as `type: 'number[]')\n+\n+```typescript\n+@Property({ type: ArrayType, nullable: true })\n+stringArray?: string[];\n+\n+@Property({ type: new ArrayType(i => +i), nullable: true })\n+numericArray?: number[];\n+```\n+\n+### BigIntType\n+\n+You can use `BigIntType` to support `bigint`s. By default, it will represent the \n+value as a `string`. \n+\n+```typescript\n+@PrimaryKey({ type: BigIntType })\n+id: string;\n+```\n+\n+### BlobType\n+\n+Blob type can be used to store binary data in the database. \n+\n+> `BlobType` will be used automatically if you specify the type hint as `Buffer`. \n+> This means that the following example should work even without the explicit \n+> `type: BlobType` option (with both reflect-metadata and ts-morph providers).\n+\n+```typescript\n+@Property({ type: BlobType, nullable: true })\n+blob?: Buffer;\n+```\n+\n+### JsonType\n+\n+To store objects we can use `JsonType`. As some drivers are handling objects \n+automatically and some don't, this type will handle the serialization in a driver\n+independent way (calling `parse` and `stringify` only when needed).\n+\n+```typescript\n+@Property({ type: JsonType, nullable: true })\n+object?: { foo: string; bar: number };\n+```\n+\n+### DateType\n+\n+To store dates without time information, we can use `DateType`. It does use `date`\n+column type and maps it to the `Date` object. \n+\n+```typescript\n+@Property({ type: DateType, nullable: true })\n+born?: Date;\n+```\n+\n+### TimeType\n+\n+As opposed to the `DateType`, to store only the time information, we can use\n+`TimeType`. It will use the `time` column type, the runtime type is string. \n+\n+```typescript\n+@Property({ type: TimeType, nullable: true })\n+bornTime?: string;\n+```\n", "debugging.md": "@@ -0,0 +1,80 @@\n+---\n+title: Logging\n+---\n+\n+For development purposes it might come handy to enable logging and debug mode:\n+\n+```typescript\n+return MikroORM.init({\n+ debug: true,\n+});\n+```\n+\n+By doing this `MikroORM` will start using `console.log()` function to dump all queries:\n+\n+```\n+[query] select `e0`.* from `author` as `e0` where `e0`.`name` = ? limit ? [took 2 ms]\n+[query] begin [took 1 ms]\n+[query] insert into `author` (`name`, `email`, `created_at`, `updated_at`, `terms_accepted`) values (?, ?, ?, ?, ?) 
[took 2 ms]\n+[query] commit [took 2 ms]\n+```\n+\n+It is also useful for debugging problems with entity discovery, as you will see information\n+about every processed entity:\n+\n+```\n+[discovery] ORM entity discovery started\n+[discovery] - processing entity Author\n+[discovery] - using cached metadata for entity Author\n+[discovery] - processing entity Book\n+[discovery] - processing entity BookTag\n+[discovery] - entity discovery finished after 13 ms\n+```\n+\n+## Custom Logger\n+\n+You can also provide your own logger via `logger` option. \n+\n+```typescript\n+return MikroORM.init({\n+ debug: true,\n+ logger: msg => myCustomLogger.log(msg),\n+});\n+```\n+\n+## Logger Namespaces\n+\n+There are multiple Logger Namespaces that you can specifically request, while omitting the rest.\n+Just specify array of them via the `debug` option:\n+\n+```typescript\n+return MikroORM.init({\n+ debug: ['query'], // now only queries will be logged\n+});\n+```\n+\n+Currently, there are 4 namespaces \u2013 `query`, `query-params`, `discovery` and `info`.\n+\n+If you provide `query-params` then you must also provide `query` in order for it to take effect.\n+\n+## Highlighters\n+\n+Previously Highlight.js was used to highlight various things in the CLI, \n+like SQL and mongo queries, or migrations or entities generated via CLI.\n+While the library worked fine, it was causing performance issues mainly \n+for those bundling via webpack and using lambdas, as the library was huge.\n+\n+In v4 highlighting is disabled by default, and there are 2 highlighters \n+you can optionally use (you need to install them first).\n+\n+```typescript\n+import { SqlHighlighter } from '@mikro-orm/sql-highlighter';\n+\n+MikroORM.init({\n+ highlighter: new SqlHighlighter(),\n+ // ...\n+});\n+```\n+\n+For MongoDB you can use `MongoHighlighter` from `@mikro-orm/mongo-highlighter` \n+package.\n", "decorators.md": "@@ -0,0 +1,459 @@\n+---\n+title: Decorators\n+---\n+\n+## Entity Definition\n+\n+### @Entity()\n+\n+`@Entity` decorator is used to mark your model classes as entities. Do not use it for \n+abstract base classes.\n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `tableName` | `string` | yes | Override default collection/table name. |\n+| `collection` | `string` | yes | Alias for `tableName`. |\n+| `comment` | `string` | yes | Specify comment to table **(SQL only)** |\n+| `customRepository` | `() => EntityRepository` | yes | Set custom repository class. |\n+\n+> You can also use `@Repository()` decorator instead of `customRepository` parameter.\n+\n+```typescript\n+@Entity({ tableName: 'authors' })\n+export class Author { ... }\n+```\n+\n+## Entity Properties\n+\n+### @Property()\n+\n+`@Property()` decorator is used to define regular entity property. All following decorators\n+extend the `@Property()` decorator, so you can also use its parameters there. \n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `fieldName` | `string` | yes | Override default property name (see [Naming Strategy](naming-strategy.md)). |\n+| `type` | `string` &#124; `Type` | yes | Explicitly specify the runtime type (see [Metadata Providers](metadata-providers.md) and [Custom Types](custom-types.md)). |\n+| `onUpdate` | `() => any` | yes | Automatically update the property value every time entity gets updated. |\n+| `persist` | `boolean` | yes | Set to `false` to define [Shadow Property](serializing.md#shadow-properties). 
|\n+| `hidden` | `boolean` | yes | Set to `true` to omit the property when [Serializing](serializing.md). |\n+| `columnType` | `string` | yes | Specify exact database column type for [Schema Generator](schema-generator.md). **(SQL only)** |\n+| `length` | `number` | yes | Length/precision of database column, used for `datetime/timestamp/varchar` column types for [Schema Generator](schema-generator.md). **(SQL only)** |\n+| `default` | `any` | yes | Specify default column value for [Schema Generator](schema-generator.md). **(SQL only)** |\n+| `unique` | `boolean` | yes | Set column as unique for [Schema Generator](schema-generator.md).. **(SQL only)** |\n+| `nullable` | `boolean` | yes | Set column as nullable for [Schema Generator](schema-generator.md).. **(SQL only)** |\n+| `unsigned` | `boolean` | yes | Set column as unsigned for [Schema Generator](schema-generator.md).. **(SQL only)** |\n+| `comment` | `string` | yes | Specify comment of column for [Schema Generator](schema-generator.md).. **(SQL only)** |\n+| `version` | `boolean` | yes | Set to true to enable [Optimistic Locking](transactions.md#optimistic-locking). **(SQL only)** |\n+\n+> You can use property initializers as usual.\n+\n+```typescript\n+@Property({ length: 50, fieldName: 'first_name' })\n+name!: string;\n+\n+@Property({ columnType: 'datetime', fieldName: 'born_date' })\n+born?: Date;\n+\n+@Property({ columnType: 'tinyint' })\n+age?: number;\n+\n+@Property({ onUpdate: () => new Date() })\n+updatedAt = new Date();\n+\n+@Property()\n+registered = false;\n+```\n+\n+### @PrimaryKey()\n+\n+`@PrimaryKey()` decorator is used to define entity's unique primary key identifier. \n+\n+> `@PrimaryKey()` decorator extend the `@Property()` decorator, so you can use all \n+> its parameters.\n+\n+> Every entity needs to have at least one primary key (see composite primary keys).\n+\n+> Note that if only one PrimaryKey is set and it's type is number it will be set to auto incremented automatically in all SQL drivers. \n+\n+```typescript\n+@PrimaryKey()\n+id!: number; // auto increment PK in SQL drivers\n+\n+@PrimaryKey()\n+uuid = uuid.v4(); // uuid PK in SQL drivers\n+\n+@PrimaryKey()\n+_id!: ObjectId; // ObjectId PK in mongodb driver\n+```\n+\n+### @SerializedPrimaryKey()\n+\n+> Property marked with `@SerializedPrimaryKey()` is virtual, it will not be persisted \n+> into the database.\n+\n+For MongoDB you can define serialized primary key, which will be then used in entity \n+serialization via `JSON.stringify()` (through method `entity.toJSON()`).\n+You will be able to use it to manipulate with the primary key as string. \n+ \n+See [Usage with MongoDH](usage-with-mongo.md) and [Serializing](serializing.md).\n+\n+```typescript\n+@PrimaryKey()\n+_id: ObjectId;\n+\n+@SerializedPrimaryKey()\n+id!: string;\n+```\n+\n+### @Enum()\n+\n+> `@Enum()` decorator extend the `@Property()` decorator, so you can use all its \n+> parameters.\n+\n+`@Enum()` decorator can be used for both numeric and string enums. By default enums are \n+considered numeric, and will be represented in the database schema as `tinyint/smallint`. \n+For string enums, if you define the enum in same file, its values will be automatically \n+sniffed. \n+\n+See [Defining Entities](defining-entities.md#enums).\n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `items` | `number[]` &#124; `string[]` &#124; `() => Dictionary` | yes | Specify enum items explicitly. 
|\n+\n+```typescript\n+@Enum() // with ts-morph metadata provider we do not need to specify anything\n+enum0 = MyEnum1.VALUE_1;\n+\n+@Enum(() => MyEnum1) // or @Enum({ items: () => MyEnum1 })\n+enum1 = MyEnum1.VALUE_1;\n+\n+@Enum({ type: 'MyEnum2', nullable: true })\n+enum2?: MyEnum2; // MyEnum2 needs to be defined in current file (can be re-exported)\n+\n+@Enum({ items: [1, 2, 3] })\n+enum3 = 3;\n+\n+@Enum({ items: ['a', 'b', 'c'] })\n+enum4 = 'a';\n+```\n+\n+### @Formula()\n+\n+`@Formula()` decorator can be used to map some SQL snippet to your entity. \n+The SQL fragment can be as complex as you want and even include subselects.\n+\n+See [Defining Entities](defining-entities.md#formulas).\n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `formula` | `string` &#124; `() => string` | no | SQL fragment that will be part of the select clause. |\n+\n+```typescript\n+@Formula('obj_length * obj_height * obj_width')\n+objectVolume?: number;\n+```\n+\n+### @Index() and @Unique()\n+\n+Use `@Index()` to create an index, or `@Unique()` to create unique constraint. You can \n+use those decorators both on the entity level and on property level. To create compound\n+index, use the decorator on the entity level and provide list of property names via the\n+`properties` option.\n+\n+See [Defining Entities](defining-entities.md#indexes).\n+\n+| Parameter | Type | Optional | Description |\n+|--------------|----------|----------|-------------|\n+| `name` | `string` | yes | index name |\n+| `properties` | `string` &#124; `string[]` | yes | list of properties, required when using on entity level |\n+| `type` | `string` | yes | index type, not available for `@Unique()` |\n+\n+```typescript\n+@Entity()\n+@Index({ properties: ['name', 'age'] }) // compound index, with generated name\n+@Index({ name: 'custom_idx_name', properties: ['name'] }) // simple index, with custom name\n+@Unique({ properties: ['name', 'email'] })\n+export class Author {\n+\n+ @Property()\n+ @Unique()\n+ email!: string;\n+\n+ @Index() // generated name\n+ @Property()\n+ age?: number;\n+\n+ @Index({ name: 'born_index' })\n+ @Property()\n+ born?: Date;\n+\n+}\n+```\n+\n+## Entity Relationships\n+\n+All relationship decorators have `entity`, `cascade` and `eager` optional parameters. \n+If you use the default `ReflectMetadataProvider`, then `entity` parameter might be required \n+You will be warned about it being not defined while required during discovery process if you \n+use `ReflectMetadataProvider`. \n+\n+You can also use `type` parameter instead of it - the difference being that `type` parameter\n+needs to be string, while in `entity` parameter you can provide a reference (wrapped in \n+a callback to overcome issues with circular dependencies) to the entity, which plays nice \n+with refactoring features in IDEs like WebStorm. \n+\n+> If you explicitly provide `entity` as a reference, it will enable type checks for other\n+> reference parameters like `inversedBy` or `mappedBy`.\n+\n+### @ManyToOne()\n+\n+> `@ManyToOne()` decorator extend the `@Property()` decorator, so you can use all \n+> its parameters.\n+\n+Many instances of the current Entity refer to One instance of the referred Entity.\n+\n+See [Defining Entities](relationships.md#manytoone) for more examples.\n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `entity` | `string` &#124; `() => EntityName` | yes | Set target entity type. 
|\n+| `cascade` | `Cascade[]` | yes | Set what actions on owning entity should be cascaded to the relationship. Defaults to `[Cascade.PERSIST, Cascade.MERGE]` (see [Cascading](cascading.md)). |\n+| `eager` | `boolean` | yes | Always load the relationship. |\n+| `inversedBy` | `(string & keyof T) ` &#124; ` (e: T) => any` | yes | Point to the inverse side property name. |\n+| `wrappedReference` | `boolean` | yes | Wrap the entity in [`Reference` wrapper](entity-references.md). |\n+| `onDelete` | `string` | yes | [Referential integrity](cascading.md#declarative-referential-integrity). |\n+| `onUpdateIntegrity` | `string` | yes | [Referential integrity](cascading.md#declarative-referential-integrity). |\n+\n+```typescript\n+@ManyToOne()\n+author1?: Author; // type taken via reflection (TsMorphMetadataProvider)\n+\n+@ManyToOne(() => Author) // explicit type\n+author2?: Author;\n+\n+@ManyToOne({ entity: () => Author, cascade: [Cascade.ALL] }) // options object\n+author3?: Author;\n+```\n+\n+### @OneToOne()\n+\n+> `@OneToOne()` decorator extend the `@Property()` decorator, so you can use all \n+> its parameters.\n+\n+One instance of the current Entity refers to One instance of the referred Entity.\n+\n+See [Defining Entities](relationships.md#onetoone) for more examples, including bi-directional 1:1.\n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `entity` | `string` &#124; `() => EntityName` | yes | Set target entity type. |\n+| `cascade` | `Cascade[]` | yes | Set what actions on owning entity should be cascaded to the relationship. Defaults to `[Cascade.PERSIST, Cascade.MERGE]` (see [Cascading](cascading.md)). |\n+| `eager` | `boolean` | yes | Always load the relationship. |\n+| `owner` | `boolean` | yes | Explicitly set as owning side (same as providing `inversedBy`). |\n+| `inversedBy` | `(string & keyof T) ` &#124; ` (e: T) => any` | yes | Point to the inverse side property name. |\n+| `mappedBy` | `(string & keyof T)` &#124; `(e: T) => any` | yes | Point to the owning side property name. |\n+| `wrappedReference` | `boolean` | yes | Wrap the entity in [`Reference` wrapper](entity-references.md). |\n+| `orphanRemoval` | `boolean` | yes | Remove the entity when it gets disconnected from the relationship (see [Cascading](cascading.md#orphan-removal)). |\n+| `joinColumn` | `string` | yes | Override default database column name on the owning side (see [Naming Strategy](naming-strategy.md)). |\n+| `onDelete` | `string` | yes | [Referential integrity](cascading.md#declarative-referential-integrity). |\n+| `onUpdateIntegrity` | `string` | yes | [Referential integrity](cascading.md#declarative-referential-integrity). 
|\n+\n+```typescript\n+// when none of `owner/inverseBy/mappedBy` is provided, it will be considered owning side\n+@OneToOne()\n+bestFriend1!: User;\n+\n+// side with `inversedBy` is the owning one, to define inverse side use `mappedBy`\n+@OneToOne({ inversedBy: 'bestFriend1', orphanRemoval: true })\n+bestFriend2!: User;\n+\n+// when defining it like this, you need to specifically mark the owning side with `owner: true`\n+@OneToOne(() => User, user => user.bestFriend2, { owner: true, orphanRemoval: true })\n+bestFriend3!: User;\n+```\n+\n+### @OneToMany()\n+\n+> `@OneToMany()` decorator extend the `@Property()` decorator, so you can use all \n+> its parameters.\n+\n+One instance of the current Entity has Many instances (references) to the referred Entity.\n+\n+See [Defining Entities](relationships.md#onetomany) for more examples, including bi-directional 1:m.\n+\n+> You need to initialize the value with `Collection<T>` instance.\n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `mappedBy` | `(string & keyof T)` &#124; `(e: T) => any` | no | Point to the owning side property name. |\n+| `entity` | `string` &#124; `() => EntityName` | yes | Set target entity type. |\n+| `cascade` | `Cascade[]` | yes | Set what actions on owning entity should be cascaded to the relationship. Defaults to `[Cascade.PERSIST, Cascade.MERGE]` (see [Cascading](cascading.md)). |\n+| `eager` | `boolean` | yes | Always load the relationship. |\n+| `orphanRemoval` | `boolean` | yes | Remove the entity when it gets disconnected from the connection (see [Cascading](cascading.md#orphan-removal)). |\n+| `orderBy` | `{ [field: string]: QueryOrder }` | yes | Set default ordering condition. |\n+| `joinColumn` | `string` | yes | Override default database column name on the owning side (see [Naming Strategy](naming-strategy.md)). |\n+| `inverseJoinColumn` | `string` | yes | Override default database column name on the inverse side (see [Naming Strategy](naming-strategy.md)). |\n+\n+```typescript\n+@OneToMany(() => Book, book => book.author)\n+books1 = new Collection<Book>(this);\n+\n+@OneToMany({ mappedBy: 'author', cascade: [Cascade.ALL] })\n+books2 = new Collection<Book>(this); // target entity type can be read via `TsMorphMetadataProvider` too\n+```\n+\n+### @ManyToMany()\n+\n+> `@ManyToMany()` decorator extend the `@Property()` decorator, so you can use all \n+> its parameters.\n+\n+Many instances of the current Entity refers to Many instances of the referred Entity.\n+\n+See [Defining Entities](relationships.md#manytomany) for more examples, including bi-directional m:n.\n+\n+> You need to initialize the value with `Collection<T>` instance.\n+\n+| Parameter | Type | Optional | Description |\n+|-----------|------|----------|-------------|\n+| `entity` | `string` &#124; `() => EntityName` | yes | Set target entity type. |\n+| `cascade` | `Cascade[]` | yes | Set what actions on owning entity should be cascaded to the relationship. Defaults to `[Cascade.PERSIST, Cascade.MERGE]` (see [Cascading](cascading.md)). |\n+| `eager` | `boolean` | yes | Always load the relationship. |\n+| `owner` | `boolean` | yes | Explicitly set as owning side (same as providing `inversedBy`). |\n+| `inversedBy` | `(string & keyof T) ` &#124; ` (e: T) => any` | yes | Point to the inverse side property name. |\n+| `mappedBy` | `(string & keyof T)` &#124; `(e: T) => any` | yes | Point to the owning side property name. 
|\n+| `orderBy` | `{ [field: string]: QueryOrder }` | yes | Set default ordering condition. |\n+| `fixedOrder` | `boolean` | yes | Force stable insertion order of items in the collection (see [Collections](collections.md#forcing-fixed-order-of-collection-items)). |\n+| `fixedOrderColumn` | `string` | yes | Override default order column name (`id`). |\n+| `pivotTable` | `string` | yes | Override default name for pivot table (see [Naming Strategy](naming-strategy.md)). |\n+| `joinColumn` | `string` | yes | Override default database column name on the owning side (see [Naming Strategy](naming-strategy.md)). |\n+| `inverseJoinColumn` | `string` | yes | Override default database column name on the inverse side (see [Naming Strategy](naming-strategy.md)). |\n+\n+```typescript\n+@ManyToMany({ entity: () => BookTag, cascade: [], fixedOrderColumn: 'order' })\n+tags = new Collection<BookTag>(this); // m:n with autoincrement PK\n+\n+@ManyToMany(() => BookTag, undefined, { pivotTable: 'book_to_tag_unordered', orderBy: { name: QueryOrder.ASC } })\n+tagsUnordered = new Collection<BookTag>(this); // m:n with composite PK\n+```\n+\n+## Lifecycle Hooks\n+\n+You can use lifecycle hooks to run some code when entity gets persisted. You can mark any of\n+entity methods with them, you can also mark multiple methods with same hook.\n+\n+> All hooks support async methods with one exception - `@OnInit`.\n+\n+### @OnInit()\n+\n+Fired when new instance of entity is created, either manually `em.create()`, or \n+automatically when new entities are loaded from database\n+\n+> `@OnInit` is not fired when you create the entity manually via its constructor (`new MyEntity()`)\n+\n+```typescript\n+@OnInit()\n+doStuffOnInit() {\n+ this.fullName = `${this.firstName} - ${this.lastName}`; // initialize shadow property\n+}\n+```\n+\n+### @BeforeCreate()\n+\n+Fired right before we persist the new entity into the database.\n+\n+```typescript\n+@BeforeCreate()\n+async doStuffBeforeCreate() {\n+ // ...\n+}\n+```\n+\n+### @AfterCreate()\n+\n+Fired right after the new entity is created in the database and merged to identity map. \n+Since this event entity will have reference to `EntityManager` and will be \n+enabled to call `entity.init()` method (including all entity references and collections).\n+\n+```typescript\n+@AfterCreate()\n+async doStuffAfterCreate() {\n+ // ...\n+}\n+```\n+\n+### @BeforeUpdate()\n+\n+Fired right before we update the entity in the database.\n+\n+```typescript\n+@BeforeUpdate()\n+async doStuffBeforeUpdate() {\n+ // ...\n+}\n+```\n+\n+### @AfterUpdate()\n+\n+Fired right after the entity is updated in the database. \n+\n+```typescript\n+@AfterUpdate()\n+async doStuffAfterUpdate() {\n+ // ...\n+}\n+```\n+\n+### @BeforeDelete()\n+\n+Fired right before we delete the record from database. It is fired only when\n+removing entity or entity reference, not when deleting records by query. \n+\n+```typescript\n+@BeforeDelete()\n+async doStuffBeforeDelete() {\n+ // ...\n+}\n+```\n+\n+### @AfterDelete()\n+\n+Fired right after the record gets deleted from database and it is unset from the identity map.\n+\n+```typescript\n+@AfterDelete()\n+async doStuffAfterDelete() {\n+ // ...\n+}\n+```\n+\n+## Entity Repository\n+\n+### @Repository()\n+\n+Used to register custom entity repository. 
\n+\n+> `em.getRepository()` will automatically return custom repository if it is registered.\n+\n+```typescript\n+@Repository(Author)\n+export class CustomAuthorRepository extends EntityRepository<Author> {\n+ // your custom methods...\n+}\n+```\n+\n+## Event Subscriber\n+\n+### @Subscriber()\n+\n+Used to register an event subscriber. Keep in mind that you need to make sure the file \n+gets loaded in order to make this decorator registration work (e.g. you import that file \n+explicitly somewhere).\n+\n+```typescript\n+@Subscriber()\n+export class AuthorSubscriber implements EventSubscriber<Author> {\n+ // ...\n+}\n+```\n", "defining-entities.md": "@@ -0,0 +1,593 @@\n+---\n+title: Defining Entities\n+---\n+\n+There are two ways how you can define your entities:\n+\n+- Decorated classes\n+- `EntitySchema` helper\n+\n+## EntitySchema helper\n+\n+With `EntitySchema` helper you define the schema programmatically. \n+\n+```typescript title=\"./entities/Book.ts\"\n+export interface Book extends BaseEntity {\n+ title: string;\n+ author: Author;\n+ publisher: Publisher;\n+ tags: Collection<BookTag>;\n+}\n+\n+export const schema = new EntitySchema<Book, BaseEntity>({\n+ name: 'Book',\n+ extends: 'BaseEntity',\n+ properties: {\n+ title: { type: 'string' },\n+ author: { reference: 'm:1', entity: 'Author', inversedBy: 'books' },\n+ publisher: { reference: 'm:1', entity: 'Publisher', inversedBy: 'books' },\n+ tags: { reference: 'm:n', entity: 'BookTag', inversedBy: 'books', fixedOrder: true },\n+ },\n+});\n+```\n+\n+When creating new entity instances, you will need to use `em.create()` method that will\n+create instance of internally created class. \n+\n+```typescript\n+const repo = em.getRepository<Author>('Author');\n+const author = repo.create('Author', { name: 'name', email: 'email' }); // instance of internal Author class\n+await repo.persistAndFlush(author);\n+```\n+\n+You can optionally use custom class for entity instances. Read more about this approach \n+in [Defining Entities via EntitySchema section](entity-schema.md).\n+\n+## Classes and Decorators\n+\n+Entities are simple javascript objects (so called POJO), decorated with `@Entity` decorator.\n+No real restrictions are made, you do not have to extend any base class, you are more than welcome\n+to [use entity constructors](entity-constructors.md), just do not forget to specify primary key with\n+`@PrimaryKey` decorator.\n+\n+```typescript title=\"./entities/Book.ts\"\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property()\n+ createdAt = new Date();\n+\n+ @Property({ onUpdate: () => new Date() })\n+ updatedAt = new Date();\n+\n+ @Property()\n+ title!: string;\n+\n+ @ManyToOne() // when you provide correct type hint, ORM will read it for you\n+ author!: Author;\n+\n+ @ManyToOne(() => Publisher) // or you can specify the entity as class reference or string name\n+ publisher?: Publisher;\n+\n+ @ManyToMany() // owning side can be simple as this!\n+ tags = new Collection<BookTag>(this);\n+\n+ constructor(title: string, author: Author) {\n+ this.title = title;\n+ this.author = author;\n+ }\n+\n+}\n+```\n+\n+As you can see, entity properties are decorated either with `@Property` decorator, or with one\n+of reference decorators: `@ManyToOne`, `@OneToMany`, `@OneToOne` and `@ManyToMany`. 
\n+\n+> From v3 you can also use default exports when defining your entity.\n+\n+Here is another example of `Author` entity, that was referenced from the `Book` one, this \n+time defined for mongo:\n+\n+```typescript title=\"./entities/Author.ts\"\n+@Entity()\n+export class Author {\n+\n+ @PrimaryKey()\n+ _id!: ObjectId;\n+\n+ @SerializedPrimaryKey()\n+ id!: string;\n+\n+ @Property()\n+ createdAt = new Date();\n+\n+ @Property({ onUpdate: () => new Date() })\n+ updatedAt = new Date();\n+\n+ @Property()\n+ name!: string;\n+\n+ @Property()\n+ email!: string;\n+\n+ @Property()\n+ age?: number;\n+\n+ @Property()\n+ termsAccepted = false;\n+\n+ @Property()\n+ identities?: string[];\n+\n+ @Property()\n+ born?: Date;\n+\n+ @OneToMany(() => Book, book => book.author)\n+ books = new Collection<Book>(this);\n+\n+ @ManyToMany()\n+ friends = new Collection<Author>(this);\n+\n+ @ManyToOne()\n+ favouriteBook?: Book;\n+\n+ @Property({ version: true })\n+ version!: number;\n+\n+ constructor(name: string, email: string) {\n+ this.name = name;\n+ this.email = email;\n+ }\n+\n+}\n+```\n+\n+More information about modelling relationships can be found on [modelling relationships page](relationships.md).\n+\n+If you want to define your entity in Vanilla JavaScript, take a look [here](usage-with-js.md).\n+\n+### Optional Properties\n+\n+When you define the property as optional (marked with `?`), this will be automatically considered\n+as nullable property (mainly for SQL schema generator). \n+\n+> This auto-detection works only when you omit the `type`/`entity` attribute.\n+\n+```typescript\n+@ManyToOne()\n+favouriteBook?: Book; // correct: no `type` or `entity` provided, **will** be marked as `nullable`\n+\n+@ManyToOne(() => Book, { nullable: true })\n+favouriteBook?: Book; // correct, `entity` provided and explicitly marked as `nullable`\n+\n+@ManyToOne(() => Book)\n+favouriteBook?: Book; // wrong, not marked as `nullable`\n+```\n+\n+### Default values\n+\n+You can set default value of a property in 2 ways:\n+\n+1. Use runtime default value of the property. This approach should be preferred as long \n+as you are not using any native database function like `now()`. With this approach your\n+entities will have the default value set even before it is actually persisted into the \n+database (e.g. when you instantiate new entity via `new Author()` or `em.create(Author, { ... })`.\n+\n+ ```typescript\n+ @Property()\n+ foo!: number = 1;\n+\n+ @Property()\n+ bar!: string = 'abc';\n+\n+ @Property()\n+ baz!: Date = new Date();\n+ ``` \n+\n+2. Use `default` parameter of `@Property` decorator. This way the actual default value \n+will be provided by the database, and automatically mapped to the entity property after\n+it is being persisted (after flush). To use SQL functions like `now()`, use `defaultRaw`.\n+\n+ > Since v4 you should use `defaultRaw` for SQL functions, as `default` with string values\n+ > will be automatically quoted. \n+\n+ ```typescript\n+ @Property({ default: 1 })\n+ foo!: number;\n+\n+ @Property({ default: 'abc' })\n+ bar!: string;\n+\n+ @Property({ defaultRaw: 'now' })\n+ baz!: Date;\n+ ``` \n+\n+### Enums\n+\n+To define enum property, use `@Enum()` decorator. Enums can be either numeric or string valued. \n+\n+For schema generator to work properly in case of string enums, you need to define the enum \n+is same file as where it is used, so its values can be automatically discovered. If you want \n+to define the enum in another file, you should reexport it also in place where you use it. 
\n+\n+Another possibility is to provide the reference to the enum implementation in the decorator\n+via `@Enum(() => UserRole)`. \n+\n+> You can also set enum items manually via `items: string[]` attribute. \n+\n+```typescript\n+import { OutsideEnum } from './OutsideEnum.ts';\n+\n+@Entity()\n+export class User {\n+\n+ @Enum()\n+ role!: UserRole; // string enum\n+\n+ @Enum()\n+ status!: UserStatus; // numeric enum\n+\n+ @Enum(() => OutsideEnum)\n+ outside!: OutsideEnum; // string enum defined outside of this file\n+\n+}\n+\n+export enum UserRole {\n+ ADMIN = 'admin',\n+ MODERATOR = 'moderator',\n+ USER = 'user',\n+}\n+\n+export const enum UserStatus {\n+ DISABLED,\n+ ACTIVE,\n+}\n+\n+// or we could reexport OutsideEnum\n+// export { OutsideEnum } from './OutsideEnum.ts';\n+``` \n+\n+### Enum arrays\n+\n+We can also use array of values for enum, in that case, `EnumArrayType` type\n+will be used automatically, that will validate items on flush. \n+\n+```ts\n+enum Role {\n+ User = 'user',\n+ Admin = 'admin',\n+}\n+\n+@Enum({ items: () => Role, array: true, default: [Role.User] })\n+roles: Role[] = [Role.User];\n+```\n+\n+### Mapping directly to primary keys\n+\n+Sometimes we might want to work only with the primary key of a relation. \n+To do that, we can use `mapToPk` option on M:1 and 1:1 relations:\n+\n+```ts\n+@ManyToOne(() => User, { mapToPk: true })\n+user: number;\n+```\n+\n+For composite keys, this will give us ordered tuple representing the raw PKs,\n+which is the internal format of composite PK:\n+\n+```ts\n+@ManyToOne(() => User, { mapToPk: true })\n+user: [string, string]; // [first_name, last_name]\n+```\n+\n+### Formulas\n+\n+`@Formula()` decorator can be used to map some SQL snippet to your entity. \n+The SQL fragment can be as complex as you want and even include subselects.\n+\n+```typescript\n+@Formula('obj_length * obj_height * obj_width')\n+objectVolume?: number;\n+```\n+\n+Formulas will be added to the select clause automatically. In case you are facing \n+problems with `NonUniqueFieldNameException`, you can define the formula as a \n+callback that will receive the entity alias in the parameter:\n+\n+```typescript\n+@Formula(alias => `${alias}.obj_length * ${alias}.obj_height * ${alias}.obj_width`)\n+objectVolume?: number;\n+```\n+\n+### Indexes\n+\n+You can define indexes via `@Index()` decorator, for unique indexes, use `@Unique()` decorator. \n+You can use it either on entity class, or on entity property:\n+\n+```typescript\n+@Entity()\n+@Index({ properties: ['name', 'age'] }) // compound index, with generated name\n+@Index({ name: 'custom_idx_name', properties: ['name'] }) // simple index, with custom name\n+@Unique({ properties: ['name', 'email'] })\n+export class Author {\n+\n+ @Property()\n+ @Unique()\n+ email!: string;\n+\n+ @Property()\n+ @Index() // generated name\n+ age?: number;\n+\n+ @Index({ name: 'born_index' })\n+ @Property()\n+ born?: Date;\n+\n+}\n+```\n+\n+### Custom Types\n+\n+You can define custom types by extending `Type` abstract class. 
It has 4 optional methods:\n+\n+- `convertToDatabaseValue(value: any, platform: Platform): any`\n+\n+ Converts a value from its JS representation to its database representation of this type.\n+\n+- `convertToJSValue(value: any, platform: Platform): any`\n+\n+ Converts a value from its database representation to its JS representation of this type.\n+\n+- `toJSON(value: any, platform: Platform): any`\n+\n+ Converts a value from its JS representation to its serialized JSON form of this type.\n+ By default converts to the database value.\n+ \n+- `getColumnType(prop: EntityProperty, platform: Platform): string`\n+\n+ Gets the SQL declaration snippet for a field of this type.\n+\n+More information can be found in [Custom Types](custom-types.md) section.\n+\n+### Lazy scalar properties\n+\n+You can mark any property as `lazy: true` to omit it from the select clause. \n+This can be handy for properties that are too large and you want to have them \n+available only some times, like a full text of an article.\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @Property({ columnType: 'text', lazy: true })\n+ text: string;\n+\n+}\n+``` \n+\n+You can use `populate` parameter to load them.\n+\n+```typescript\n+const b1 = await em.find(Book, 1); // this will omit the `text` property\n+const b2 = await em.find(Book, 1, { populate: ['text'] }); // this will load the `text` property\n+```\n+\n+> If the entity is already loaded and you need to populate a lazy scalar property, \n+> you might need to pass `refresh: true` in the `FindOptions`.\n+\n+### Virtual Properties\n+\n+You can define your properties as virtual, either as a method, or via JavaScript `get/set`.\n+\n+Following example defines User entity with `firstName` and `lastName` database fields, that \n+are both hidden from the serialized response, replaced with virtual properties `fullName` \n+(defined as a classic method) and `fullName2` (defined as a JavaScript getter).\n+\n+> For JavaScript getter you need to provide `{ persist: false }` option otherwise the value\n+> would be stored in the database. \n+\n+```typescript\n+@Entity()\n+export class User {\n+\n+ @Property({ hidden: true })\n+ firstName!: string;\n+\n+ @Property({ hidden: true })\n+ lastName!: string;\n+\n+ @Property({ name: 'fullName' })\n+ getFullName() {\n+ return `${this.firstName} ${this.lastName}`;\n+ }\n+\n+ @Property({ persist: false })\n+ get fullName2() {\n+ return `${this.firstName} ${this.lastName}`;\n+ }\n+\n+}\n+\n+const repo = em.getRepository(User);\n+const author = repo.create({ firstName: 'Jon', lastName: 'Snow' });\n+\n+console.log(author.getFullName()); // 'Jon Snow'\n+console.log(author.fullName2); // 'Jon Snow'\n+console.log(author.toJSON()); // { fullName: 'Jon Snow', fullName2: 'Jon Snow' }\n+```\n+\n+### Entity file names\n+\n+Starting with MikroORM 4.2, there is no limitation for entity file names. It is now\n+also possible to define multiple entities in a single file using folder based discovery. \n+\n+### Using BaseEntity\n+\n+You can define your own base entity with properties that you require on all entities, like\n+primary key and created/updated time. 
Single table inheritance is also supported.\n+\n+Read more about this topic in [Inheritance Mapping](inheritance-mapping.md) section.\n+\n+> If you are initializing the ORM via `entities` option, you need to specify all your\n+> base entities as well.\n+\n+```typescript title=\"./entities/BaseEntity.ts\"\n+import { v4 } from 'uuid';\n+\n+export abstract class BaseEntity {\n+\n+ @PrimaryKey()\n+ uuid = v4();\n+\n+ @Property()\n+ createdAt = new Date();\n+\n+ @Property({ onUpdate: () => new Date() })\n+ updatedAt = new Date();\n+\n+}\n+```\n+\n+There is a special case, when we need to annotate the base entity - if we are using\n+folder based discovery, and the base entity is not using any decorators (e.g. it does\n+not define any decorated property). In that case, we need to mark it as abstract:\n+\n+```ts\n+@Entity({ abstract: true })\n+export abstract class BaseEntity {\n+ // ...\n+}\n+```\n+\n+### Examples of entity definition with various primary keys\n+\n+#### Using id as primary key (SQL drivers)\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ id!: number; // string is also supported\n+\n+ @Property()\n+ title!: string;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+}\n+```\n+\n+#### Using UUID as primary key (SQL drivers)\n+\n+```typescript\n+import { v4 } from 'uuid';\n+\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ uuid = v4();\n+\n+ @Property()\n+ title!: string;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+}\n+```\n+\n+#### Using PostgreSQL [uuid-osp](https://www.postgresql.org/docs/current/uuid-ossp.html) module function as primary key\n+\n+Requires enabling the module via: `create extension \"uuid-ossp\";`\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey({ type: 'uuid', defaultRaw: 'uuid_generate_v4()' })\n+ uuid: string;\n+\n+ @Property()\n+ title!: string;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+}\n+```\n+\n+#### Using BigInt as primary key (MySQL and PostgreSQL)\n+\n+You can use `BigIntType` to support `bigint`s. By default it will represent the value as\n+a `string`. \n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey({ type: BigIntType })\n+ id: string;\n+\n+}\n+```\n+\n+If you want to use native `bigint`s, read the following guide: [Using native BigInt PKs](using-bigint-pks.md).\n+\n+\n+#### Example of Mongo entity\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ _id!: ObjectId;\n+\n+ @SerializedPrimaryKey() \n+ id!: string; // string variant of PK, will be handled automatically\n+\n+ @Property()\n+ title!: string;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+}\n+```\n+\n+#### Using BaseEntity (previously WrappedEntity)\n+\n+From v4 `BaseEntity` class is provided with `init`, `isInitialized`, `assign`\n+and other methods that are otherwise available via the `wrap()` helper.\n+\n+> Usage of `BaseEntity` is optional.\n+\n+```typescript\n+import { BaseEntity } from '@mikro-orm/core';\n+\n+@Entity()\n+export class Book extends BaseEntity {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property()\n+ title!: string;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+}\n+\n+const book = new Book();\n+console.log(book.isInitialized()); // true\n+```\n+\n+With your entities set up, you can start [using entity manager](entity-manager.md) and \n+[repositories](repositories.md) as described in following sections. 
\n", "deployment.md": "@@ -0,0 +1,270 @@\n+---\n+title: Deployment\n+---\n+\n+Under the hood, `MikroORM` uses [`ts-morph`](https://github.com/dsherret/ts-morph) to read \n+TypeScript source files of all entities to be able to detect all types. Thanks to this, \n+defining the type is enough for runtime validation.\n+\n+This has some consequences for deployment of your application. Sometimes you will want to \n+deploy only your compiled output, without TS source files at all. In that case, discovery \n+process will probably fail. You have several options:\n+\n+## Deploy pre-built cache\n+\n+By default, output of metadata discovery will be cached in `temp` folder. You can reuse this \n+cache in your deployed application. Currently the cache is saved in files named like the entity\n+source file, e.g. `Author.ts` entity will store cache in `temp/Author.ts.json` file.\n+\n+When running compiled code, JS entities will be taken into account instead, so you will need to \n+generate the cache by running the compiled code locally. That will generate `temp/Author.js.json`, \n+which is the file you will need to deploy alongside your application. \n+\n+## Fill type or entity attributes everywhere\n+\n+What discovery process does is to sniff TS types and save their value to string, so it can be \n+used later for validation. You can skip the whole process by simply providing those values \n+manually:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey({ type: 'number' })\n+ id!: number;\n+\n+ @Property({ type: 'string' })\n+ title!: string;\n+\n+ @Enum(() => BookStatus)\n+ status?: BookStatus;\n+\n+ @ManyToOne(() => Author) // or `@ManyToOne({ type: 'Author' })` or `@ManyToOne({ entity: () => Author })`\n+ author1!: Author;\n+\n+ // or\n+ @ManyToOne({ type: 'Author' })\n+ author2!: Author;\n+\n+ // or\n+ @ManyToOne({ entity: () => Author })\n+ author3!: Author;\n+\n+}\n+\n+export enum BookStatus {\n+ SOLD_OUT = 'sold',\n+ ACTIVE = 'active',\n+ UPCOMING = 'upcoming',\n+ }\n+```\n+\n+> For numeric enums this is not be required.\n+\n+## Deploy your entity source files\n+\n+Usually it does not matter much that you deploy more files than needed, so the easiest way\n+is to just deploy your TS source files next to the compiled output, just like during development.\n+\n+## Deploy a bundle of entities and dependencies with [Webpack](https://webpack.js.org/)\n+\n+Webpack can be used to bundle every entity and dependency: you get a single file that contains \n+every required module/file and has no external dependencies.\n+\n+### Prepare your project for Webpack\n+\n+Webpack requires every required file to be hardcoded in your code. Code like this won't work \n+(it will throw an error because Webpack doesn't know which file to include in the bundle):\n+\n+```typescript\n+let dependencyNameInVariable = 'dependency';\n+const dependency = import(dependencyNameInVariable);\n+```\n+\n+As Webpack creates a file bundle, it isn't desired that it scans directories for entities \n+or metadata. Therefore you need to provide list of entities in the `entities` option in \n+the initialization function, folder/file based discovery is not supported (see dynamically \n+including entities as an alternative solution). 
Also you need to fill `type` or `entity` \n+attributes everywhere (see above) and disable caching (it will decrease start-time slightly).\n+\n+> In v4 caching is disabled by default when using `ReflectMetadataProvider`.\n+\n+#### Disabling dynamic file access\n+\n+First thing you should do is to disable dynamic file access in the discovery process via the\n+`discovery.disableDynamicFileAccess` toggle. This will effectively do:\n+\n+- set metadata provider to `ReflectMetadataProvider`\n+- disable caching\n+- disallow usage of paths in `entities/entitiesTs`\n+\n+#### Manually defining entities\n+\n+```typescript\n+import { Author, Book, BookTag, Publisher, Test } from '../entities';\n+\n+await MikroORM.init({\n+ ...\n+ entities: [Author, Book, BookTag, Publisher, Test],\n+ discovery: { disableDynamicFileAccess: true },\n+ ...\n+});\n+```\n+\n+#### Dynamically loading dependencies\n+\n+This will make use of a Webpack feature called [dynamic imports](https://webpack.js.org/guides/code-splitting/#dynamic-imports). \n+This way you can import dependencies as long as part of the path is known.\n+\n+In following example [`require.context`](https://webpack.js.org/guides/dependency-management/#requirecontext) \n+is used. This 'function' is only usable during the building process from Webpack so therefore \n+there is an alternative solution provided that will as long as the environment variable \n+WEBPACK is not set (e.g. during development with `ts-node`).\n+\n+Here, all files with the extension `.ts` will be imported from the directory `../entities`. \n+\n+> [`flatMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/flatMap) is a method from ECMAScript 2019 and requires [Node.js](https://nodejs.org/) 11 or higher.\n+\n+```typescript\n+await MikroORM.init({\n+ // ...\n+ entities: await getEntities(),\n+ discovery: { disableDynamicFileAccess: true },\n+ // ...\n+});\n+\n+async function getEntities(): Promise<any[]> {\n+ if (process.env.WEBPACK) {\n+ const modules = require.context('../entities', true, /\\.ts$/);\n+\n+ return modules\n+ .keys()\n+ .map(r => modules(r))\n+ .flatMap(mod => Object.keys(mod).map(className => mod[className]));\n+ }\n+\n+ const promises = fs.readdirSync('../entities').map(file => import(`../entities/${file}`));\n+ const modules = await Promise.all(promises);\n+\n+ return modules.flatMap(mod => Object.keys(mod).map(className => mod[className]));\n+}\n+```\n+\n+### Webpack configuration\n+\n+Webpack can be run without [configuration file](https://webpack.js.org/configuration/) but \n+for building MikroORM and [Node.js](https://nodejs.org/) bundles it requires additional \n+configuration. Configuration for Webpack is stored in the root of the project as \n+`webpack.config.js`. 
For all the options please refer to the following [page](https://webpack.js.org/configuration/).\n+\n+For bundling MikroORM the following configuration is required:\n+\n+```javascript\n+const path = require('path');\n+const { EnvironmentPlugin, IgnorePlugin } = require('webpack');\n+const TerserPlugin = require('terser-webpack-plugin');\n+\n+// Mark our dev dependencies as externals so they don't get included in the webpack bundle.\n+const { devDependencies } = require('./package.json');\n+const externals = {};\n+\n+for (const devDependency of Object.keys(devDependencies)) {\n+ externals[devDependency] = `commonjs ${devDependency}`;\n+}\n+\n+// And anything MikroORM's packaging can be ignored if it's not on disk.\n+// Later we check these dynamically and tell webpack to ignore the ones we don't have.\n+const optionalModules = new Set([\n+ ...Object.keys(require('knex/package.json').browser),\n+ ...Object.keys(require('@mikro-orm/core/package.json').peerDependencies),\n+ ...Object.keys(require('@mikro-orm/core/package.json').devDependencies)\n+]);\n+\n+module.exports = {\n+ entry: path.resolve('app', 'server.ts'),\n+\n+ // You can toggle development mode on to better see what's going on in the webpack bundle,\n+ // but for anything that is getting deployed, you should use 'production'.\n+ // mode: 'development',\n+ mode: 'production',\n+\n+ optimization: {\n+ minimizer: [\n+ new TerserPlugin({\n+ terserOptions: {\n+ // We want to minify the bundle, but don't want Terser to change the names of our entity\n+ // classes. This can be controlled in a more granular way if needed, (see\n+ // https://terser.org/docs/api-reference.html#mangle-options) but the safest default\n+ // config is that we simply disable mangling altogether but allow minification to proceed.\n+ mangle: false,\n+ }\n+ })\n+ ]\n+ },\n+ target: 'node',\n+ module: {\n+ rules: [\n+ // Bring in our typescript files.\n+ {\n+ test: /\\.ts$/,\n+ exclude: /node_modules/,\n+ loader: 'ts-loader',\n+ },\n+\n+ // Native modules can be bundled as well.\n+ {\n+ test: /\\.node$/,\n+ use: 'node-loader',\n+ },\n+\n+ // Some of MikroORM's dependencies use mjs files, so let's set them up here.\n+ {\n+ test: /\\.mjs$/,\n+ include: /node_modules/,\n+ type: 'javascript/auto',\n+ },\n+ ],\n+ },\n+\n+ // These are computed above.\n+ externals,\n+\n+ resolve: {\n+ extensions: ['.ts', '.js']\n+ },\n+\n+ plugins: [\n+ // Ignore any of our optional modules if they aren't installed. This ignores database drivers\n+ // that we aren't using for example.\n+ new EnvironmentPlugin({ WEBPACK: true }),\n+ new IgnorePlugin({\n+ checkResource: resource => {\n+ const [baseResource] = resource.split('/');\n+\n+ if (optionalModules.has(baseResource)) {\n+ try {\n+ require.resolve(resource);\n+ return false;\n+ } catch {\n+ return true;\n+ }\n+ }\n+\n+ return false;\n+ },\n+ }),\n+ ],\n+\n+ output: {\n+ filename: 'server.js',\n+ libraryTarget: 'commonjs',\n+ path: path.resolve(__dirname, '..', 'output'),\n+ },\n+};\n+```\n+\n+### Running Webpack\n+\n+To run Webpack execute `webpack` (or `npx webpack` if not installed globally) in the root \n+of the project. 
It will probably throw a few warnings but you can ignore the errors regarding \n+MikroORM: the mentioned pieces of code won't be executed if properly bundled with Webpack.\n", "embeddables.md": "@@ -0,0 +1,122 @@\n+---\n+title: Separating Concerns using Embeddables\n+sidebar_label: Embeddables\n+---\n+\n+> Support for embeddables was added in version 4.0\n+\n+Embeddables are classes which are not entities themselves, but are embedded in \n+entities and can also be queried. You'll mostly want to use them to reduce \n+duplication or separating concerns. Value objects such as date range or address \n+are the primary use case for this feature.\n+\n+> Embeddables needs to be discovered just like regular entities, don't forget to \n+> add them to the list of entities when initializing the ORM.\n+\n+Embeddables can only contain properties with basic `@Property()` mapping.\n+\n+For the purposes of this tutorial, we will assume that you have a `User` class in \n+your application and you would like to store an address in the `User` class. We will \n+model the `Address` class as an embeddable instead of simply adding the respective \n+columns to the `User` class.\n+\n+```typescript\n+@Entity()\n+export class User {\n+\n+ @Embedded()\n+ address!: Address;\n+\n+}\n+\n+@Embeddable()\n+export class Address {\n+ \n+ @Property()\n+ street!: string;\n+\n+ @Property()\n+ postalCode!: string;\n+\n+ @Property()\n+ city!: string;\n+\n+ @Property()\n+ country!: string;\n+\n+}\n+```\n+\n+> When using ReflectMetadataProvider, you might need to provide the class in decorator options:\n+> `@Embedded(() => Address)` or `@Embedded({ entity: () => Address })`.\n+\n+In terms of your database schema, MikroORM will automatically inline all columns from \n+the `Address` class into the table of the `User` class, just as if you had declared \n+them directly there.\n+\n+## Initializing embeddables\n+\n+In case all fields in the embeddable are nullable, you might want to initialize the \n+embeddable, to avoid getting a null value instead of the embedded object.\n+\n+```typescript\n+@Embedded()\n+address = new Address();\n+```\n+\n+## Column Prefixing\n+\n+By default, MikroORM names your columns by prefixing them, using the value object name.\n+\n+Following the example above, your columns would be named as `address_street`, \n+`address_postal_code`...\n+\n+You can change this behaviour to meet your needs by changing the `prefix` attribute \n+in the `@Embedded()` notation.\n+\n+The following example shows you how to set your prefix to `myPrefix_`:\n+\n+```typescript\n+@Entity()\n+export class User {\n+\n+ @Embedded({ prefix: 'myPrefix_' })\n+ address!: Address;\n+\n+}\n+```\n+\n+To have MikroORM drop the prefix and use the value object's property name directly, \n+set `prefix: false`:\n+\n+```typescript\n+@Entity()\n+export class User {\n+\n+ @Embedded({ entity: () => Address, prefix: false })\n+ address!: Address;\n+\n+}\n+```\n+\n+## Storing embeddables as objects\n+\n+From MikroORM v4.2 we can also store the embeddable as an object instead of\n+inlining its properties to the owing entity.\n+\n+```ts\n+@Entity()\n+export class User {\n+\n+ @Embedded({ entity: () => Address, object: true })\n+ address!: Address;\n+\n+}\n+```\n+\n+In SQL drivers, this will use a JSON column to store the value. 
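\n+\n+Below is a minimal query sketch (an illustration only, assuming the `User`/`Address` entities above and an existing `em` instance); with `object: true` this relies on searching by JSON properties, see the note below:\n+\n+```typescript\n+// find users by a property of the object-embedded address\n+const users = await em.find(User, { address: { city: 'Prague' } });\n+```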
\n+\n+> Only MySQL and PostgreSQL drivers support searching by JSON properties currently.\n+\n+> This part of documentation is highly inspired by [doctrine tutorial](https://www.doctrine-project.org/projects/doctrine-orm/en/latest/tutorials/embeddables.html)\n+> as the behaviour here is pretty much the same.\n", "entity-constructors.md": "@@ -0,0 +1,37 @@\n+---\n+title: Using Entity Constructors\n+---\n+\n+Internally, `MikroORM` never calls entity constructor, so you are free to use it as you wish.\n+The constructor will be called only when you instantiate the class yourself via `new` operator,\n+so it is a handy place to require your data when creating new entity.\n+\n+For example following `Book` entity definition will always require to set `title` and `author`, \n+but `publisher` will be optional:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property()\n+ title!: string;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+ @ManyToOne()\n+ publisher?: Publisher;\n+\n+ @ManyToMany({ entity: () => BookTag, inversedBy: 'books' })\n+ tags = new Collection<BookTag>(this);\n+\n+ constructor(title: string, author: Author) {\n+ this.title = title;\n+ this.author = author;\n+ }\n+\n+}\n+```\n", "entity-generator.md": "@@ -0,0 +1,43 @@\n+---\n+title: Entity Generator\n+---\n+\n+To generate entities from existing database schema, you can use `EntityGenerator` helper. \n+\n+You can use it via CLI: \n+\n+```sh\n+npx mikro-orm generate-entities --dump # Dumps all generated entities\n+npx mikro-orm generate-entities --save --path=./my-entities # Saves entities into given directory\n+```\n+\n+Or you can create simple script where you initialize MikroORM like this:\n+\n+```typescript title=\"./generate-entities.ts\"\n+import { MikroORM } from '@mikro-orm/core';\n+\n+(async () => {\n+ const orm = await MikroORM.init({\n+ dbName: 'your-db-name',\n+ // ...\n+ });\n+ const generator = orm.getEntityGenerator();\n+ const dump = await generator.generate({ \n+ save: true,\n+ baseDir: process.cwd() + '/my-entities',\n+ });\n+ console.log(dump);\n+ await orm.close(true);\n+})();\n+```\n+\n+Then run this script via `ts-node` (or compile it to plain JS and use `node`):\n+\n+```sh\n+$ ts-node generate-entities\n+```\n+\n+## Current limitations\n+\n+- many to many relations are not supported, pivot table will be represented as separate entity\n+- in mysql, tinyint columns will be defined as boolean properties\n", "entity-helper.md": "@@ -0,0 +1,119 @@\n+---\n+title: EntityHelper and Decorated Entities\n+sidebar_label: Updating Entity Values\n+---\n+\n+## Updating Entity Values with `entity.assign()`\n+\n+When you want to update entity based on user input, you will usually have just plain\n+string ids of entity relations as user input. 
Normally you would need to use \n+`em.getReference()` to create references from each id first, and then\n+use those references to update entity relations:\n+\n+```typescript\n+const jon = new Author('Jon Snow', '[email protected]');\n+const book = new Book('Book', jon);\n+book.author = orm.em.getReference<Author>(Author, '...id...');\n+```\n+\n+Same result can be easily achieved with `entity.assign()`:\n+\n+```typescript\n+import { wrap } from '@mikro-orm/core';\n+\n+wrap(book).assign({ \n+ title: 'Better Book 1', \n+ author: '...id...',\n+});\n+console.log(book.title); // 'Better Book 1'\n+console.log(book.author); // instance of Author with id: '...id...'\n+console.log(book.author.id); // '...id...'\n+```\n+\n+To use `entity.assign()` on not managed entities, you need to provide `EntityManager` \n+instance explicitly: \n+\n+```typescript\n+import { wrap } from '@mikro-orm/core';\n+\n+const book = new Book();\n+wrap(book).assign({ \n+ title: 'Better Book 1', \n+ author: '...id...',\n+}, { em: orm.em });\n+```\n+\n+By default, `entity.assign(data)` behaves same way as `Object.assign(entity, data)`, \n+e.g. it does not merge things recursively. To enable deep merging of object properties, \n+use second parameter to enable `mergeObjects` flag:\n+\n+```typescript\n+import { wrap } from '@mikro-orm/core';\n+\n+book.meta = { foo: 1, bar: 2 };\n+\n+wrap(book).assign({ meta: { foo: 3 } }, { mergeObjects: true });\n+console.log(book.meta); // { foo: 3, bar: 2 }\n+\n+wrap(book).assign({ meta: { foo: 4 } });\n+console.log(book.meta); // { foo: 4 }\n+```\n+\n+## `WrappedEntity` and `wrap()` helper\n+\n+`IWrappedEntity` is an interface that defines public helper methods provided \n+by the ORM:\n+\n+```typescript\n+interface IWrappedEntity<T, PK extends keyof T> {\n+ isInitialized(): boolean;\n+ populated(populated?: boolean): void;\n+ init(populated?: boolean, lockMode?: LockMode): Promise<T>;\n+ toReference(): IdentifiedReference<T, PK>;\n+ toObject(ignoreFields?: string[]): Dictionary;\n+ toJSON(...args: any[]): Dictionary;\n+ assign(data: any, options?: AssignOptions | boolean): T;\n+}\n+```\n+\n+There are two ways to access those methods. You can either extend `BaseEntity` \n+(exported from `@mikro-orm/core`), that defines those methods, or use the \n+`wrap()` helper to access `WrappedEntity` instance, where those methods\n+exist.\n+\n+Users can choose whether they are fine with polluting the entity interface with \n+those additional methods, or they want to keep the interface clean \n+and use the `wrap(entity)` helper method instead to access them. \n+\n+> Since v4 `wrap(entity)` no longer returns the entity, now the `WrappedEntity` instance is \n+> being returned. It contains only public methods (`init`, `assign`, `isInitialized`, ...),\n+> if you want to access internal properties like `__meta` or `__em`, you need to explicitly\n+> ask for the helper via `wrap(entity, true)`.\n+\n+```typescript\n+import { BaseEntity } from '@mikro-orm/core';\n+\n+@Entity()\n+export class Book extends BaseEntity<Book, 'id'> { ... }\n+```\n+\n+Then you can work with those methods directly:\n+\n+```typescript\n+book.meta = { foo: 1, bar: 2 };\n+book.assign({ meta: { foo: 3 } }, { mergeObjects: true });\n+console.log(book.meta); // { foo: 3, bar: 2 }\n+```\n+\n+### Accessing internal prefixed properties\n+\n+Previously it was possible to access internal properties like `__meta` or `__em` \n+from the `wrap()` helper. 
Now to access them, you need to use second parameter of\n+wrap:\n+\n+```typescript\n+@Entity()\n+export class Author { ... }\n+\n+console.log(wrap(author, true).__meta);\n+```\n", "entity-manager-api.md": "@@ -0,0 +1,342 @@\n+---\n+title: EntityManager API\n+---\n+\n+#### `getDriver(): IDatabaseDriver`\n+\n+Gets the Driver instance used by this EntityManager\n+\n+----\n+\n+#### `getConnection(type?: 'read' | 'write'): ReturnType<D['getConnection']>`\n+\n+Gets the Connection instance, by default returns write connection\n+\n+----\n+\n+#### `getRepository(entityName: EntityName<T>): GetRepository<T, U>`\n+\n+Returns `EntityRepository` for given entity, respects `customRepository` option of `@Entity`\n+and `entityRepository` option of `MikroORM.init()`.\n+\n+----\n+\n+#### `getValidator(): EntityValidator`\n+\n+Gets EntityValidator instance\n+\n+----\n+\n+#### `find(entityName: EntityName<T>, where: FilterQuery<T>, options?: FindOptions<T, P>): Promise<Loaded<T, P>[]>`\n+\n+Finds all entities matching your `where` query. You can pass additional options via the `options` parameter.\n+\n+```typescript\n+export interface FindOptions<T, P extends Populate<T> = Populate<T>> {\n+ populate?: P;\n+ orderBy?: QueryOrderMap;\n+ limit?: number;\n+ offset?: number;\n+ refresh?: boolean;\n+ convertCustomTypes?: boolean;\n+ fields?: string[];\n+ schema?: string;\n+ flags?: QueryFlag[];\n+ groupBy?: string | string[];\n+ having?: QBFilterQuery<T>;\n+ strategy?: LoadStrategy;\n+ filters?: Dictionary<boolean | Dictionary> | string[] | boolean;\n+}\n+```\n+\n+----\n+\n+#### `find(entityName: EntityName<T>, where: FilterQuery<T>, populate?: P, orderBy?: QueryOrderMap, limit?: number, offset?: number): Promise<Loaded<T, P>[]>`\n+\n+Finds all entities matching your `where` query.\n+Same as previous `find` method, just with dedicated parameters for `populate`, `orderBy`, `limit`\n+and `offset`.\n+\n+----\n+\n+#### `addFilter(name: string, cond: FilterQuery<T> | ((args: Dictionary) => FilterQuery<T>), entityName?: EntityName<T>[], enabled?: boolean): void`\n+\n+----\n+\n+#### `setFilterParams(name: string, args: Dictionary): void`\n+\n+----\n+\n+#### `getFilterParams<T extends Dictionary = Dictionary>(name: string): T`\n+\n+----\n+\n+#### `findAndCount(entityName: EntityName<T>, where: FilterQuery<T>, options?: FindOptions<T, P>): Promise<[Loaded<T, P>[], number]>`\n+\n+Calls `em.find()` and `em.count()` with the same arguments (where applicable) and returns the results as tuple\n+where first element is the array of entities and the second is the count.\n+\n+----\n+\n+#### `findAndCount(entityName: EntityName<T>, where: FilterQuery<T>, populate?: P, orderBy?: QueryOrderMap, limit?: number, offset?: number): Promise<[Loaded<T, P>[], number]>`\n+\n+Calls `em.find()` and `em.count()` with the same arguments (where applicable) and returns the results as tuple\n+where first element is the array of entities and the second is the count.\n+\n+----\n+\n+#### `findOne(entityName: EntityName<T>, where: FilterQuery<T>, options?: FindOneOptions<T, P>): Promise<Loaded<T, P> | null>`\n+\n+Finds first entity matching your `where` query.\n+\n+----\n+\n+#### `findOne(entityName: EntityName<T>, where: FilterQuery<T>, populate?: P, orderBy?: QueryOrderMap): Promise<Loaded<T, P> | null>`\n+\n+Finds first entity matching your `where` query.\n+\n+----\n+\n+#### `findOneOrFail(entityName: EntityName<T>, where: FilterQuery<T>, options?: FindOneOrFailOptions<T, P>): Promise<Loaded<T, P>>`\n+\n+Finds first entity matching your `where` query. 
If nothing found, it will throw an error.\n+You can override the factory for creating this method via `options.failHandler` locally\n+or via `Configuration.findOneOrFailHandler` globally.\n+\n+Finds first entity matching your `where` query. If nothing found, it will throw an error.\n+\n+----\n+\n+#### `findOneOrFail(entityName: EntityName<T>, where: FilterQuery<T>, populate?: P, orderBy?: QueryOrderMap): Promise<Loaded<T, P>>`\n+\n+You can override the factory for creating this method via `options.failHandler` locally\n+or via `Configuration.findOneOrFailHandler` globally.\n+\n+----\n+\n+#### `transactional(cb: (em: D[typeof EntityManagerType]) => Promise<T>, ctx?: any): Promise<T>`\n+\n+Runs your callback wrapped inside a database transaction.\n+\n+----\n+\n+#### `begin(ctx?: Transaction): Promise<void>`\n+\n+Starts new transaction bound to this EntityManager. Use `ctx` parameter to provide the parent when nesting transactions.\n+\n+----\n+\n+#### `commit(): Promise<void>`\n+\n+Commits the transaction bound to this EntityManager. Flushes before doing the actual commit query.\n+\n+----\n+\n+#### `rollback(): Promise<void>`\n+\n+Rollbacks the transaction bound to this EntityManager.\n+\n+----\n+\n+#### `lock(entity: AnyEntity, lockMode: LockMode, lockVersion?: number | Date): Promise<void>`\n+\n+Runs your callback wrapped inside a database transaction.\n+\n+----\n+\n+#### `nativeInsert(entity: T): Promise<Primary<T>>`\n+\n+Fires native insert query. Calling this has no side effects on the context (identity map).\n+\n+----\n+\n+#### `nativeInsert(entityName: EntityName<T>, data: EntityData<T>): Promise<Primary<T>>`\n+\n+Fires native insert query. Calling this has no side effects on the context (identity map).\n+\n+----\n+\n+#### `nativeUpdate(entityName: EntityName<T>, where: FilterQuery<T>, data: EntityData<T>, options?: UpdateOptions<T>): Promise<number>`\n+\n+Fires native update query. Calling this has no side effects on the context (identity map).\n+\n+----\n+\n+#### `nativeDelete(entityName: EntityName<T>, where: FilterQuery<T>, options?: DeleteOptions<T>): Promise<number>`\n+\n+Fires native delete query. Calling this has no side effects on the context (identity map).\n+\n+----\n+\n+#### `map(entityName: EntityName<T>, result: EntityData<T>): T`\n+\n+Maps raw DB result to entity, adding it to current Identity Map. Equivalent to \n+`driver.mapResult()` followed by `em.merge()`.\n+\n+----\n+\n+#### `merge(entity: T, refresh?: boolean): T`\n+\n+Merges given entity to this EntityManager so it becomes managed. You can force refreshing of existing entities\n+via second parameter. By default it will return already loaded entities without modifying them.\n+\n+This is useful when you want to work with cached entities. \n+\n+----\n+\n+#### `merge(entityName: EntityName<T>, data: EntityData<T>, refresh?: boolean, convertCustomTypes?: boolean): T`\n+\n+Merges given entity to this EntityManager so it becomes managed. You can force refreshing of existing entities\n+via second parameter. 
By default it will return already loaded entities without modifying them.\n+\n+----\n+\n+#### `create(entityName: EntityName<T>, data: EntityData<T>): New<T, P>`\n+\n+Creates new instance of given entity and populates it with given data\n+\n+----\n+\n+#### `assign(entity: T, data: EntityData<T>, options?: AssignOptions): T`\n+\n+Shortcut for `wrap(entity).assign(data, { em })`\n+\n+----\n+\n+#### `getReference(entityName: EntityName<T>, id: Primary<T>, wrapped?: boolean, convertCustomTypes?: boolean): T | Reference<T>`\n+\n+Gets a reference to the entity identified by the given type and identifier without actually loading it, if the entity is not yet loaded\n+\n+----\n+\n+#### `count(entityName: EntityName<T>, where?: FilterQuery<T>, options?: CountOptions<T>): Promise<number>`\n+\n+Returns total number of entities matching your `where` query.\n+\n+----\n+\n+#### `persist(entity: AnyEntity | Reference<AnyEntity> | (AnyEntity | Reference<AnyEntity>)[]): this`\n+\n+Tells the EntityManager to make an instance managed and persistent.\n+The entity will be entered into the database at or before transaction commit or as a result of the flush operation.\n+\n+----\n+\n+#### `persistAndFlush(entity: AnyEntity | Reference<AnyEntity> | (AnyEntity | Reference<AnyEntity>)[]): Promise<void>`\n+\n+Persists your entity immediately, flushing all not yet persisted changes to the database too.\n+Equivalent to `em.persist(e).flush()`.\n+\n+----\n+\n+#### `persistLater(entity: AnyEntity | AnyEntity[]): void`\n+\n+> deprecated, use `persist()`\n+\n+Tells the EntityManager to make an instance managed and persistent.\n+The entity will be entered into the database at or before transaction commit or as a result of the flush operation.\n+\n+----\n+\n+#### `remove(entity: T | Reference<T> | (T | Reference<T>)[]): this`\n+\n+Marks entity for removal.\n+A removed entity will be removed from the database at or before transaction commit or as a result of the flush operation.\n+\n+This method fires `beforeDelete` and `afterDelete` hooks. \n+\n+To remove entities by condition, use `em.nativeDelete()`.\n+\n+----\n+\n+#### `removeAndFlush(entity: AnyEntity | Reference<AnyEntity>): Promise<void>`\n+\n+Removes an entity instance immediately, flushing all not yet persisted changes to the database too.\n+Equivalent to `em.remove(e).flush()`\n+\n+This method fires `beforeDelete` and `afterDelete` hooks. \n+\n+----\n+\n+#### `removeLater(entity: AnyEntity): void`\n+\n+> deprecated use `remove()`\n+\n+Marks entity for removal.\n+A removed entity will be removed from the database at or before transaction commit or as a result of the flush operation.\n+\n+----\n+\n+#### `flush(): Promise<void>`\n+\n+Flushes all changes to objects that have been queued up to now to the database.\n+This effectively synchronizes the in-memory state of managed objects with the database.\n+\n+----\n+\n+#### `clear(): void`\n+\n+Clears the EntityManager. All entities that are currently managed by this EntityManager become detached.\n+\n+----\n+\n+#### `canPopulate(entityName: EntityName<T>, property: string): boolean`\n+\n+Checks whether given property can be populated on the entity.\n+\n+----\n+\n+#### `populate(entities: T | T[], populate: P, where?: FilterQuery<T>, orderBy?: QueryOrderMap, refresh?: boolean, validate?: boolean): Promise<Loaded<T, P> | Loaded<T, P>[]>`\n+\n+Populate existing entities. 
Supports nested (conditional) populating.\n+\n+----\n+\n+#### `fork(clear?: boolean, useContext?: boolean): D[typeof EntityManagerType]`\n+\n+Returns new EntityManager instance with its own identity map\n+\n+```\n+@param clear do we want clear identity map? defaults to true\n+@param useContext use request context? should be used only for top level request scope EM, defaults to false\n+```\n+\n+----\n+\n+#### `getUnitOfWork(): UnitOfWork`\n+\n+Gets the UnitOfWork used by the EntityManager to coordinate operations.\n+\n+----\n+\n+#### `getEntityFactory(): EntityFactory`\n+\n+Gets the EntityFactory used by the EntityManager.\n+\n+----\n+\n+#### `getEventManager(): EventManager`\n+\n+----\n+\n+#### `isInTransaction(): boolean`\n+\n+Checks whether this EntityManager is currently operating inside a database transaction.\n+\n+----\n+\n+#### `getTransactionContext<T extends Transaction = Transaction>(): T | undefined`\n+\n+Gets the transaction context (driver dependent object used to make sure queries are executed on same connection).\n+\n+----\n+\n+#### `getMetadata(): MetadataStorage`\n+\n+Gets the MetadataStorage.\n+\n+----\n+\n+#### `getComparator(): EntityComparator`\n+\n+----\n", "entity-manager.md": "@@ -0,0 +1,261 @@\n+---\n+title: Working with Entity Manager\n+sidebar_label: Entity Manager\n+---\n+\n+## Persist and Flush\n+\n+There are 2 methods we should first describe to understand how persisting works in MikroORM: \n+`em.persist()` and `em.flush()`.\n+\n+`em.persist(entity)` is used to mark new entities for future persisting. \n+It will make the entity managed by given `EntityManager` and once `flush` will be called, it \n+will be written to the database. \n+\n+To understand `flush`, lets first define what managed entity is: An entity is managed if \n+it\u2019s fetched from the database (via `em.find()`, `em.findOne()` or via other managed entity) \n+or registered as new through `em.persist()`.\n+\n+`em.flush()` will go through all managed entities, compute appropriate change sets and \n+perform according database queries. As an entity loaded from database becomes managed \n+automatically, you do not have to call persist on those, and flush is enough to update \n+them.\n+\n+```typescript\n+const book = await orm.em.findOne(Book, 1);\n+book.title = 'How to persist things...';\n+\n+// no need to persist `book` as its already managed by the EM\n+await orm.em.flush();\n+```\n+\n+## Persisting and Cascading\n+\n+To save entity state to database, you need to persist it. Persist takes care or deciding \n+whether to use `insert` or `update` and computes appropriate change-set. Entity references\n+that are not persisted yet (does not have identifier) will be cascade persisted automatically. 
\n+\n+```typescript\n+// use constructors in your entities for required parameters\n+const author = new Author('Jon Snow', '[email protected]');\n+author.born = new Date();\n+\n+const publisher = new Publisher('7K publisher');\n+\n+const book1 = new Book('My Life on The Wall, part 1', author);\n+book1.publisher = publisher;\n+const book2 = new Book('My Life on The Wall, part 2', author);\n+book2.publisher = publisher;\n+const book3 = new Book('My Life on The Wall, part 3', author);\n+book3.publisher = publisher;\n+\n+// just persist books, author and publisher will be automatically cascade persisted\n+await orm.em.persistAndFlush([book1, book2, book3]);\n+\n+// or one by one\n+orm.em.persist(book1);\n+orm.em.persist(book2);\n+orm.em.persist(book3); \n+await orm.em.flush(); // flush everything to database at once\n+```\n+\n+## Fetching Entities with EntityManager\n+\n+To fetch entities from database you can use `find()` and `findOne()` of `EntityManager`: \n+\n+Example:\n+\n+```typescript\n+const author = await orm.em.findOne(Author, '...id...');\n+const books = await orm.em.find(Book, {});\n+\n+for (const author of authors) {\n+ console.log(author.name); // Jon Snow\n+\n+ for (const book of author.books) {\n+ console.log(book.title); // initialized\n+ console.log(book.author.isInitialized()); // true\n+ console.log(book.author.id);\n+ console.log(book.author.name); // Jon Snow\n+ console.log(book.publisher); // just reference\n+ console.log(book.publisher.isInitialized()); // false\n+ console.log(book.publisher.id);\n+ console.log(book.publisher.name); // undefined\n+ }\n+}\n+```\n+\n+To populate entity relations, you can use `populate` parameter.\n+\n+```typescript\n+const books = await orm.em.find(Book, { foo: 1 }, ['author.friends']);\n+```\n+\n+You can also use `em.populate()` helper to populate relations (or to ensure they \n+are fully populated) on already loaded entities. 
This is also handy when loading \n+entities via `QueryBuilder`:\n+\n+```typescript\n+const authors = await orm.em.createQueryBuilder(Author).select('*').getResult();\n+await em.populate(authors, ['books.tags']);\n+\n+// now your Author entities will have `books` collections populated, \n+// as well as they will have their `tags` collections populated.\n+console.log(authors[0].books[0].tags[0]); // initialized BookTag\n+```\n+\n+### Conditions Object (`FilterQuery<T>`)\n+\n+Querying entities via conditions object (`where` in `em.find(Entity, where: FilterQuery<T>)`) \n+supports many different ways:\n+\n+```typescript\n+// search by entity properties\n+const users = await orm.em.find(User, { firstName: 'John' });\n+\n+// for searching by reference you can use primary key directly\n+const id = 1;\n+const users = await orm.em.find(User, { organization: id });\n+\n+// or pass unpopulated reference (including `Reference` wrapper)\n+const ref = await orm.em.getReference(Organization, id);\n+const users = await orm.em.find(User, { organization: ref });\n+\n+// fully populated entities as also supported\n+const ent = await orm.em.findOne(Organization, id);\n+const users = await orm.em.find(User, { organization: ent });\n+\n+// complex queries with operators\n+const users = await orm.em.find(User, { $and: [{ id: { $nin: [3, 4] } }, { id: { $gt: 2 } }] });\n+\n+// you can also search for array of primary keys directly\n+const users = await orm.em.find(User, [1, 2, 3, 4, 5]);\n+\n+// and in findOne all of this works, plus you can search by single primary key\n+const user1 = await orm.em.findOne(User, 1);\n+```\n+\n+As you can see in the fifth example, one can also use operators like `$and`, `$or`, `$gte`, \n+`$gt`, `$lte`, `$lt`, `$in`, `$nin`, `$eq`, `$ne`, `$like`, `$re`. More about that can be found in \n+[Query Conditions](query-conditions.md) section. \n+\n+#### Mitigating `Type instantiation is excessively deep and possibly infinite.ts(2589)` error\n+\n+Sometimes you might be facing TypeScript errors caused by too complex query for it to \n+properly infer all types. Usually it can be solved by providing the type argument \n+explicitly.\n+\n+You can also opt in to use repository instead, as there the type inference should not be\n+problematic. \n+\n+> As a last resort, you can always type cast the query to `any`.\n+\n+```typescript\n+const books = await orm.em.find<Book>(Book, { ... your complex query ... });\n+// or\n+const books = await orm.em.getRepository(Book).find({ ... your complex query ... });\n+// or\n+const books = await orm.em.find<any>(Book, { ... your complex query ... }) as Book[];\n+```\n+\n+Another problem you might be facing is `RangeError: Maximum call stack size exceeded` error \n+thrown during TypeScript compilation (usually from file `node_modules/typescript/lib/typescript.js`).\n+The solution to this is the same, just provide the type argument explicitly.\n+\n+### Searching by referenced entity fields\n+\n+You can also search by referenced entity properties. Simply pass nested where condition like \n+this and all requested relationships will be automatically joined. Currently it will only join \n+them so you can search and sort by those. To populate entities, do not forget to pass the populate \n+parameter as well. 
\n+\n+```typescript\n+// find author of a book that has tag specified by name\n+const author = await orm.em.findOne(Author, { books: { tags: { name: 'Tag name' } } });\n+console.log(author.books.isInitialized()); // false, as it only works for query and sort\n+\n+const author = await orm.em.findOne(Author, { books: { tags: { name: 'Tag name' } } }, ['books.tags']);\n+console.log(author.books.isInitialized()); // true, because it was populated\n+console.log(author.books[0].tags.isInitialized()); // true, because it was populated\n+console.log(author.books[0].tags[0].isInitialized()); // true, because it was populated\n+```\n+\n+> This feature is fully available only for SQL drivers. In MongoDB always you need to \n+> query from the owning side - so in the example above, first load book tag by name,\n+> then associated book, then the author. Another option is to denormalize the schema. \n+\n+### Fetching Partial Entities\n+\n+When fetching single entity, you can choose to select only parts of an entity via `options.fields`:\n+\n+```typescript\n+const author = await orm.em.findOne(Author, '...', { fields: ['name', 'born'] });\n+console.log(author.id); // PK is always selected\n+console.log(author.name); // Jon Snow\n+console.log(author.email); // undefined\n+```\n+\n+### Fetching Paginated Results\n+\n+If you are going to paginate your results, you can use `em.findAndCount()` that will return\n+total count of entities before applying limit and offset.\n+\n+```typescript\n+const [authors, count] = await orm.em.findAndCount(Author, { ... }, { limit: 10, offset: 50 });\n+console.log(authors.length); // based on limit parameter, e.g. 10\n+console.log(count); // total count, e.g. 1327\n+```\n+\n+### Handling Not Found Entities\n+\n+When you call `em.findOne()` and no entity is found based on your criteria, `null` will be \n+returned. If you rather have an `Error` instance thrown, you can use `em.findOneOrFail()`:\n+\n+```typescript\n+const author = await orm.em.findOne(Author, { name: 'does-not-exist' });\n+console.log(author === null); // true\n+\n+try {\n+ const author = await orm.em.findOneOrFail(Author, { name: 'does-not-exist' });\n+ // author will be always found here\n+} catch (e) {\n+ console.error('Not found', e);\n+}\n+```\n+\n+You can customize the error either globally via `findOneOrFailHandler` option, or locally via \n+`failHandler` option in `findOneOrFail` call.\n+\n+```typescript\n+try {\n+ const author = await orm.em.findOneOrFail(Author, { name: 'does-not-exist' }, {\n+ failHandler: (entityName: string, where: Record<string, any> | IPrimaryKey) => new Error(`Failed: ${entityName} in ${util.inspect(where)}`)\n+ });\n+} catch (e) {\n+ console.error(e); // your custom error\n+}\n+```\n+\n+## Type of Fetched Entities\n+\n+Both `em.find` and `em.findOne()` methods have generic return types.\n+All of following examples are equal and will let typescript correctly infer the entity type:\n+\n+```typescript\n+const author1 = await orm.em.findOne<Author>(Author.name, '...id...');\n+const author2 = await orm.em.findOne<Author>('Author', '...id...');\n+const author3 = await orm.em.findOne(Author, '...id...');\n+```\n+\n+As the last one is the least verbose, it should be preferred. \n+\n+## Entity Repositories\n+\n+Although you can use `EntityManager` directly, much more convenient way is to use \n+[`EntityRepository` instead](https://mikro-orm.io/repositories/). 
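\n+\n+For example, a short usage sketch (reusing the `Author` entity from earlier examples):\n+\n+```typescript\n+// the repository exposes the same find/persist API, scoped to a single entity\n+const authorRepository = orm.em.getRepository(Author);\n+const authors = await authorRepository.findAll();\n+```\n+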
You can register\n+your repositories in dependency injection container like [InversifyJS](http://inversify.io/)\n+so you do not need to get them from `EntityManager` each time.\n+\n+For more examples, take a look at\n+[`tests/EntityManager.mongo.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts)\n+or [`tests/EntityManager.mysql.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mysql.test.ts).\n", "entity-references.md": "@@ -0,0 +1,197 @@\n+---\n+title: Entity References\n+sidebar_label: Entity References and Reference<T> Wrapper\n+---\n+\n+Every single entity relation is mapped to an entity reference. Reference is an entity that has\n+only its identifier. This reference is stored in identity map so you will get the same object \n+reference when fetching the same document from database.\n+\n+You can call `await entity.init()` to initialize the entity. This will trigger database call \n+and populate itself, keeping the same reference in identity map. \n+\n+```typescript\n+const author = orm.em.getReference('...id...');\n+console.log(author.id); // accessing the id will not trigger any db call\n+console.log(author.isInitialized()); // false\n+console.log(author.name); // undefined\n+\n+await author.init(); // this will trigger db call\n+console.log(author.isInitialized()); // true\n+console.log(author.name); // defined\n+```\n+\n+## Better Type-safety with `Reference<T>` Wrapper\n+\n+When you define `@ManyToOne` and `@OneToOne` properties on your entity, TypeScript compiler\n+will think that desired entities are always loaded:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @ManyToOne()\n+ author!: Author;\n+\n+ constructor(author: Author) {\n+ this.author = author;\n+ }\n+\n+}\n+\n+const book = await orm.em.findOne(Book, 1);\n+console.log(book.author instanceof Author); // true\n+console.log(book.author.isInitialized()); // false\n+console.log(book.author.name); // undefined as `Author` is not loaded yet\n+```\n+\n+You can overcome this issue by using the `Reference<T>` wrapper. It simply wraps the entity, \n+defining `load(): Promise<T>` method that will first lazy load the association if not already\n+available. You can also use `unwrap(): T` method to access the underlying entity without loading\n+it.\n+\n+You can also use `get<K extends keyof T>(prop: K): Promise<T[K]>` helper, that will call `load()` \n+for you, making sure the entity is initialized first, then returning the value of given property \n+directly. 
\n+\n+```typescript\n+import { Entity, IdentifiedReference, ManyToOne, PrimaryKey, Reference } from '@mikro-orm/core';\n+\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @ManyToOne()\n+ author!: IdentifiedReference<Author>;\n+\n+ constructor(author: Author) {\n+ this.author = Reference.create(author);\n+ }\n+\n+}\n+\n+const book = await orm.em.findOne(Book, 1);\n+console.log(book.author instanceof Reference); // true\n+console.log(book.author.isInitialized()); // false\n+console.log(book.author.name); // type error, there is no `name` property\n+console.log(book.author.unwrap().name); // undefined as author is not loaded\n+console.log((await book.author.get('name'))); // ok, loading the author first\n+console.log((await book.author.load()).name); // ok, author already loaded\n+console.log(book.author.unwrap().name); // ok, author already loaded\n+```\n+\n+There are also `getEntity()` and `getProperty()` methods that are synchronous getters, \n+that will first check if the wrapped entity is initialized, and if not, it will throw \n+and error.\n+\n+```typescript\n+const book = await orm.em.findOne(Book, 1);\n+console.log(book.author instanceof Reference); // true\n+console.log(book.author.isInitialized()); // false\n+console.log(book.author.getEntity()); // Error: Reference<Author> 123 not initialized\n+console.log(book.author.getProperty('name')); // Error: Reference<Author> 123 not initialized\n+console.log((await book.author.get('name'))); // ok, loading the author first\n+console.log(book.author.getProperty('name')); // ok, author already loaded\n+```\n+\n+If you use different metadata provider than `TsMorphMetadataProvider` \n+(e.g. `ReflectMetadataProvider`), you will also need to explicitly set `wrappedReference` \n+parameter:\n+\n+```typescript\n+@ManyToOne(() => Author, { wrappedReference: true })\n+author!: IdentifiedReference<Author>;\n+```\n+\n+### Assigning to Reference Properties\n+\n+When you define the property as `Reference` wrapper, you will need to assign the `Reference`\n+to it instead of the entity. You can create it via `Reference.create()` factory, or use `wrapped`\n+parameter of `em.getReference()`:\n+\n+```typescript\n+const book = await orm.em.findOne(Book, 1);\n+const repo = orm.em.getRepository(Author);\n+\n+book.author = repo.getReference(2, true);\n+\n+// same as:\n+book.author = Reference.create(repo.getReference(2));\n+await orm.em.flush();\n+```\n+\n+Another way is to use `toReference()` method available as part of \n+[`WrappedEntity` interface](entity-helper.md#wrappedentity-and-wrap-helper):\n+\n+```typescript\n+const author = new Author(...)\n+book.author = wrap(author).toReference();\n+```\n+\n+If the reference already exist, you can also re-assign to it via `set()` method:\n+\n+```typescript\n+book.author.set(new Author(...));\n+```\n+\n+### What is IdentifiedReference?\n+\n+`IdentifiedReference` is an intersection type that adds primary key property to the `Reference` \n+interface. 
It allows you to get the primary key from the `Reference` instance directly.\n+\n+By default it defines the PK property as `id`; you can override this via the second generic type\n+argument.\n+\n+```typescript\n+const book = await orm.em.findOne(Book, 1);\n+console.log(book.author.id); // ok, returns the PK\n+```\n+\n+You can also have a non-standard primary key like `uuid`:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @ManyToOne()\n+ author!: IdentifiedReference<Author, 'uuid'>;\n+\n+}\n+\n+const book = await orm.em.findOne(Book, 1);\n+console.log(book.author.uuid); // ok, returns the PK\n+```\n+\n+For MongoDB, define the PK generic type argument as `'id' | '_id'` to access both `string` \n+and `ObjectId` PK values:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey()\n+ _id!: ObjectId;\n+\n+ @SerializedPrimaryKey()\n+ id!: string;\n+\n+ @ManyToOne()\n+ author!: IdentifiedReference<Author, 'id' | '_id'>;\n+\n+}\n+\n+const book = await orm.em.findOne(Book, 1);\n+console.log(book.author.id); // ok, returns string PK\n+console.log(book.author._id); // ok, returns ObjectId PK\n+```\n+\n+> As opposed to `Entity.init()` which always refreshes the entity, the `Reference.load()` \n+> method will query the database only if the entity is not already loaded in the Identity Map. \n", "entity-schema.md": "@@ -0,0 +1,179 @@\n+---\n+title: Defining Entities via EntitySchema\n+---\n+\n+With the `EntitySchema` helper you define the schema programmatically. \n+\n+```typescript title="./entities/Book.ts"\n+export interface Book extends BaseEntity {\n+ title: string;\n+ author: Author;\n+ publisher: Publisher;\n+ tags: Collection<BookTag>;\n+}\n+\n+export const schema = new EntitySchema<Book, BaseEntity>({\n+ name: 'Book',\n+ extends: 'BaseEntity',\n+ properties: {\n+ title: { type: 'string' },\n+ author: { reference: 'm:1', entity: 'Author', inversedBy: 'books' },\n+ publisher: { reference: 'm:1', entity: 'Publisher', inversedBy: 'books' },\n+ tags: { reference: 'm:n', entity: 'BookTag', inversedBy: 'books', fixedOrder: true },\n+ },\n+});\n+```\n+\n+When creating new entity instances, you will need to use the `em.create()` method that will\n+create an instance of the internally created class. \n+\n+```typescript\n+const repo = em.getRepository<Author>('Author');\n+const author = repo.create('Author', { name: 'name', email: 'email' }); // instance of internal Author class\n+await repo.persistAndFlush(author);\n+```\n+\n+> Using this approach, metadata caching is automatically disabled as it is not needed.\n+\n+## Using custom entity classes\n+\n+You can optionally use a custom class for entity instances. 
\n+\n+```typescript title=\"./entities/Author.ts\"\n+export class Author extends BaseEntity {\n+ name: string;\n+ email: string;\n+ age?: number;\n+ termsAccepted?: boolean;\n+ identities?: string[];\n+ born?: Date;\n+ books = new Collection<Book>(this);\n+ favouriteBook?: Book;\n+ version?: number;\n+ \n+ constructor(name: string, email: string) {\n+ this.name = name;\n+ this.email = email;\n+ }\n+}\n+\n+export const schema = new EntitySchema<Author, BaseEntity>({\n+ class: Author,\n+ properties: {\n+ name: { type: 'string' },\n+ email: { type: 'string', unique: true },\n+ age: { type: 'number', nullable: true },\n+ termsAccepted: { type: 'boolean', default: 0, onCreate: () => false },\n+ identities: { type: 'string[]', nullable: true },\n+ born: { type: DateType, nullable: true, length: 3 },\n+ books: { reference: '1:m', entity: () => 'Book', mappedBy: book => book.author },\n+ favouriteBook: { reference: 'm:1', type: 'Book' },\n+ version: { type: 'number', persist: false },\n+ },\n+});\n+```\n+\n+Then you can use the entity class as usual:\n+\n+```typescript\n+const repo = em.getRepository(Author);\n+const author = new Author('name', 'email');\n+await repo.persistAndFlush(author);\n+```\n+\n+## Using BaseEntity\n+\n+Do not forget that base entities needs to be discovered just like normal entities. \n+\n+```typescript title=\"./entities/BaseEntity.ts\"\n+export interface BaseEntity {\n+ id: number;\n+ createdAt: Date;\n+ updatedAt: Date;\n+}\n+\n+export const schema = new EntitySchema<BaseEntity>({\n+ name: 'BaseEntity',\n+ abstract: true,\n+ properties: {\n+ id: { type: 'number', primary: true },\n+ createdAt: { type: 'Date', onCreate: () => new Date(), nullable: true },\n+ updatedAt: { type: 'Date', onCreate: () => new Date(), onUpdate: () => new Date(), nullable: true },\n+ },\n+});\n+```\n+\n+## Configuration Reference\n+\n+The parameter of `EntitySchema` requires to provide either `name` or `class` parameters. \n+When using `class`, `extends` will be automatically inferred. 
You can optionally pass \n+these additional parameters:\n+\n+```typescript\n+name: string;\n+class: Constructor<T>;\n+extends: string;\n+tableName: string; // alias for `collection: string`\n+properties: { [K in keyof T & string]: EntityProperty<T[K]> };\n+indexes: { properties: string | string[]; name?: string; type?: string }[];\n+uniques: { properties: string | string[]; name?: string }[];\n+customRepository: () => Constructor<EntityRepository<T>>;\n+hooks: Partial<Record<HookType, (string & keyof T)[]>>;\n+abstract: boolean;\n+```\n+\n+Every property then needs to contain a type specification - one of `type`/`customType`/`entity`.\n+Here are some examples of various property types:\n+\n+```typescript\n+export enum MyEnum {\n+ LOCAL = 'local',\n+ GLOBAL = 'global',\n+}\n+\n+export const schema = new EntitySchema<FooBar>({\n+ name: 'FooBar',\n+ tableName: 'tbl_foo_bar',\n+ indexes: [{ name: 'idx1', properties: 'name' }],\n+ uniques: [{ name: 'unq1', properties: ['name', 'email'] }],\n+ customRepository: () => FooBarRepository,\n+ properties: {\n+ id: { type: 'number', primary: true },\n+ name: { type: 'string' },\n+ baz: { reference: '1:1', entity: 'FooBaz', orphanRemoval: true, nullable: true },\n+ fooBar: { reference: '1:1', entity: 'FooBar', nullable: true },\n+ publisher: { reference: 'm:1', entity: 'Publisher', inversedBy: 'books' },\n+ books: { reference: '1:m', entity: () => 'Book', mappedBy: book => book.author },\n+ tags: { reference: 'm:n', entity: 'BookTag', inversedBy: 'books', fixedOrder: true },\n+ version: { type: 'Date', version: true, length: 0 },\n+ type: { enum: true, items: () => MyEnum, default: MyEnum.LOCAL },\n+ },\n+});\n+```\n+\n+> As a value for `type` you can also use one of `String`/`Number`/`Boolean`/`Date`.\n+\n+## MongoDB example\n+\n+```typescript\n+export class BookTag {\n+ _id!: ObjectId;\n+ id!: string;\n+ name: string;\n+ books = new Collection<Book>(this);\n+\n+ constructor(name: string) {\n+ this.name = name;\n+ }\n+}\n+\n+export const schema = new EntitySchema<BookTag>({\n+ class: BookTag,\n+ properties: {\n+ _id: { type: 'ObjectId', primary: true },\n+ id: { type: 'string', serializedPrimaryKey: true },\n+ name: { type: 'string' },\n+ books: { reference: 'm:n', entity: () => Book, mappedBy: book => book.tags },\n+ },\n+});\n+```\n", "filters.md": "@@ -0,0 +1,161 @@\n+---\n+title: Filters\n+---\n+\n+MikroORM has the ability to pre-define filter criteria and attach those filters \n+to given entities. The application can then decide at runtime whether certain \n+filters should be enabled and what their parameter values should be. Filters \n+can be used like database views, but they are parameterized inside the application.\n+\n+> Filter can be defined at the entity level, dynamically via EM (global filters) \n+> or in the ORM configuration.\n+\n+Filters are applied to those methods of `EntityManager`: `find()`, `findOne()`, \n+`findAndCount()`, `findOneOrFail()`, `count()`, `nativeUpdate()` and `nativeDelete()`. 
\n+\n+> The `cond` parameter can be a callback, possibly asynchronous.\n+\n+```typescript\n+@Entity()\n+@Filter({ name: 'expensive', cond: { price: { $gt: 1000 } } })\n+@Filter({ name: 'long', cond: { 'length(text)': { $gt: 10000 } } })\n+@Filter({ name: 'hasAuthor', cond: { author: { $ne: null } }, default: true })\n+@Filter({ name: 'writtenBy', cond: args => ({ author: { name: args.name } }) })\n+export class Book {\n+ ...\n+}\n+\n+const books1 = await orm.em.find(Book, {}, {\n+ filters: ['long', 'expensive'],\n+});\n+const books2 = await orm.em.find(Book, {}, {\n+ filters: { hasAuthor: false, long: true, writtenBy: { name: 'God' } },\n+});\n+```\n+\n+## Parameters\n+\n+You can define the `cond` dynamically as a callback. This callback can be also \n+asynchronous. It will get two arguments:\n+\n+- `args` - dictionary of parameters provided by user\n+- `type` - type of operation that is being filtered, one of `'read'`, `'update'`, `'delete'`\n+\n+```typescript\n+@Entity()\n+@Filter({ name: 'writtenBy', cond: async (args, type) => {\n+ if (type === 'update') {\n+ return {}; // do not apply when updating\n+ }\n+\n+ return { author: { name: args.name } };\n+} })\n+export class Book {\n+ ...\n+}\n+\n+const books = await orm.em.find(Book, {}, {\n+ filters: { writtenBy: { name: 'God' } },\n+});\n+```\n+\n+### Filters without parameters\n+\n+If we want to have a filter condition that do not need arguments, but we want\n+to access the `type` parameter, we will need to explicitly set `args: false`, \n+otherwise error will be raised due to missing parameters:\n+\n+```ts\n+@Filter({\n+ name: 'withoutParams',\n+ cond(_, type) {\n+ return { ... };\n+ },\n+ args: false,\n+ default: true,\n+})\n+```\n+\n+## Global filters\n+\n+We can also register filters dynamically via `EntityManager` API. We call such filters \n+global. They are enabled by default (unless disabled via last parameter in `addFilter()`\n+method), and applied to all entities. You can limit the global filter to only specified\n+entities. \n+\n+> Filters as well as filter params set on the EM will be copied to all its forks.\n+\n+```typescript\n+// bound to entity, enabled by default\n+em.addFilter('writtenBy', args => ({ author: args.id }), Book);\n+\n+// global, enabled by default, for all entities\n+em.addFilter('tenant', args => { ... });\n+\n+// global, enabled by default, for only specified entities\n+em.addFilter('tenant', args => { ... }, [Author, Book]);\n+...\n+\n+// set params (probably in some middleware)\n+em.setFilterParams('tenant', { tenantId: 123 });\n+em.setFilterParams('writtenBy', { id: 321 });\n+```\n+\n+Global filters can be also registered via ORM configuration:\n+\n+```typescript\n+MikroORM.init({\n+ filters: { tenant: { cond: args => ({ tenant: args.tenant }), entity: ['Author', 'User'] } },\n+ ...\n+})\n+```\n+\n+## Using filters\n+\n+We can control what filters will be applied via `filter` parameter in `FindOptions`.\n+We can either provide an array of names of filters you want to enable, or options \n+object, where we can also disable a filter (that was enabled by default), or pass some\n+parameters to those that are expecting them.\n+\n+> By passing `filters: false` we can also disable all the filters for given call. 
\n+\n+```typescript\n+em.find(Book, {}); // same as `{ tenantId: 123 }`\n+em.find(Book, {}, { filters: ['writtenBy'] }); // same as `{ author: 321, tenantId: 123 }`\n+em.find(Book, {}, { filters: { tenant: false } }); // disabled tenant filter, so truly `{}`\n+em.find(Book, {}, { filters: false }); // disabled all filters, so truly `{}`\n+```\n+\n+## Filters and populating of relationships\n+\n+When populating relationships, filters will be applied only to the root entity of \n+given query, but not to those that are auto-joined. On the other hand, this means that\n+when you use the default loading strategy - `LoadStrategy.SELECT_IN` - filters will\n+be applied to every entity populated this way, as the child entities will become\n+root entities in their respective load calls.\n+\n+## Naming of filters\n+\n+When toggling filters via `FindOptions`, we do not care about the entity name. This\n+means that when you have multiple filters defined on different entities, but with \n+the same name, they will be controlled via single toggle in the `FindOptions`. \n+\n+```typescript\n+@Entity()\n+@Filter({ name: 'tenant', cond: args => ({ tenant: args.tenant }) })\n+export class Author {\n+ ...\n+}\n+\n+@Entity()\n+@Filter({ name: 'tenant', cond: args => ({ tenant: args.tenant }) })\n+export class Book {\n+ ...\n+}\n+\n+// this will apply the tenant filter to both Author and Book entities (with SELECT_IN loading strategy)\n+const authors = await orm.em.find(Author, {}, {\n+ populate: ['books'],\n+ filters: { tenant: 123 },\n+});\n+```\n", "identity-map.md": "@@ -0,0 +1,92 @@\n+---\n+title: Identity Map and Request Context\n+---\n+\n+`MikroORM` uses identity map in background so you will always get the same instance of \n+one entity.\n+\n+```typescript\n+const authorRepository = orm.em.getRepository(Author);\n+const jon = await authorRepository.findOne({ name: 'Jon Snow' }, ['books']);\n+const authors = await authorRepository.findAll(['books']);\n+\n+// identity map in action\n+console.log(jon === authors[0]); // true\n+```\n+\n+If you want to clear this identity map cache, you can do so via `em.clear()` method:\n+\n+```typescript\n+orm.em.clear();\n+```\n+\n+You should always keep unique identity map per each request. This basically means that you need \n+to clone entity manager and use the clone in request context. There are two ways to achieve this:\n+\n+## Forking Entity Manager\n+\n+With `fork()` method you can simply get clean entity manager with its own context and identity map:\n+\n+```typescript\n+const em = orm.em.fork();\n+```\n+\n+## <a name=\"request-context\"></a> RequestContext helper for DI containers\n+\n+If you use dependency injection container like `inversify` or the one in `nestjs` framework, it \n+can be hard to achieve this, because you usually want to access your repositories via DI container,\n+but it will always provide you with the same instance, rather than new one for each request. \n+\n+To solve this, you can use `RequestContext` helper, that will use `node`'s Domain API in the \n+background to isolate the request context. MikroORM will always use request specific (forked) \n+entity manager if available, so all you need to do is to create new request context preferably \n+as a middleware:\n+\n+```typescript\n+app.use((req, res, next) => {\n+ RequestContext.create(orm.em, next);\n+});\n+``` \n+\n+You should register this middleware as the last one just before request handlers and before\n+any of your custom middleware that is using the ORM. 
There might be issues when you register \n+it before request processing middleware like `queryParser` or `bodyParser`, so definitely \n+register the context after them. \n+\n+Later on you can then access the request scoped `EntityManager` via `RequestContext.getEntityManager()`.\n+This method is used under the hood automatically, so you should not need it. \n+\n+> `RequestContext.getEntityManager()` will return `undefined` if the context was\n+> not started yet.\n+\n+## Why is Request Context needed?\n+\n+Imagine you use a single Identity Map throughout your application. It will be shared across \n+all request handlers, which can possibly run in parallel. \n+\n+### Problem 1 - growing memory footprint\n+\n+As there would be only one shared Identity Map, you can't just clear it after your request ends.\n+There can be another request working with it, so clearing the Identity Map from one request could \n+break other requests running in parallel. This will result in a growing memory footprint, as every \n+entity that became managed at some point in time would be kept in the Identity Map. \n+\n+### Problem 2 - unstable response of API endpoints\n+\n+Every entity has a `toJSON()` method that automatically converts it to its serialized form. If you \n+have only one shared Identity Map, the following situation may occur:\n+\n+Let's say there are 2 endpoints:\n+\n+1. `GET /book/:id` that returns just the book, without populating anything\n+2. `GET /book-with-author/:id` that returns the book and its author populated\n+\n+Now when someone requests the same book via both of those endpoints, you could end up with both \n+returning the same output:\n+ \n+1. `GET /book/1` returns `Book` without populating its `author` property\n+2. `GET /book-with-author/1` returns `Book`, this time with `author` populated\n+3. `GET /book/1` returns `Book`, but this time also with `author` populated\n+\n+This happens because the information about entity association being populated is stored in\n+the Identity Map. 
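\n+\n+To illustrate the difference, here is a minimal sketch (the `Book` entity and the primary key `1` are only illustrative) showing \n+that each forked `EntityManager` keeps its own Identity Map, while a single fork always returns the same instance:\n+\n+```typescript\n+// two independent forks, each with its own context and identity map\n+const em1 = orm.em.fork();\n+const em2 = orm.em.fork();\n+\n+const bookA = await em1.findOne(Book, 1);\n+const bookB = await em1.findOne(Book, 1);\n+const bookC = await em2.findOne(Book, 1);\n+\n+console.log(bookA === bookB); // true, same fork, so same identity map\n+console.log(bookA === bookC); // false, different fork, so different identity map\n+```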
\n", "index.md": "@@ -0,0 +1,57 @@\n+---\n+layout: homepage\n+title: MikroORM v4\n+hide_title: true\n+---\n+\n+## Table of contents\n+\n+- Overview\n+ - [Installation & Usage](installation.md)\n+ - [Defining Entities](defining-entities.md)\n+ - [Persisting, Cascading and Fetching Entities with `EntityManager`](entity-manager.md)\n+ - [Using `EntityRepository` instead of `EntityManager`](repositories.md)\n+- Fundamentals\n+ - [Identity Map and Request Context](identity-map.md)\n+ - [Entity References and `Reference<T>` Wrapper](entity-references.md)\n+ - [Using Entity Constructors](entity-constructors.md)\n+ - [Modelling Relationships](relationships.md)\n+ - [Collections](collections.md)\n+ - [Unit of Work](unit-of-work.md)\n+ - [Transactions](transactions.md)\n+ - [Cascading persist and remove](cascading.md)\n+ - [Filters](filters.md)\n+ - [Deployment](deployment.md)\n+- Advanced Features\n+ - [Smart Nested Populate](nested-populate.md)\n+ - [Smart Query Conditions](query-conditions.md)\n+ - [Using `QueryBuilder`](query-builder.md)\n+ - [Serializing](serializing.md)\n+ - [Updating Entity Values with `Entity.assign()`](entity-helper.md)\n+ - [Property Validation](property-validation.md)\n+ - [Lifecycle Hooks](lifecycle-hooks.md)\n+ - [Loading Strategies](loading-strategies.md)\n+ - [Naming Strategy](naming-strategy.md)\n+ - [Metadata Providers](metadata-providers.md)\n+ - [Metadata Cache](metadata-cache.md)\n+ - [Debugging](debugging.md)\n+ - [Schema Generator](schema-generator.md)\n+ - [Entity Generator](entity-generator.md)\n+ - [Migrations](migrations.md)\n+ - [Read Replica Connections](read-connections.md)\n+- Usage with Different Drivers\n+ - [Usage with SQL Drivers](usage-with-sql.md)\n+ - [Usage with MongoDB](usage-with-mongo.md)\n+- Recipes\n+ - [Usage with NestJS](usage-with-nestjs.md)\n+ - [Usage with Vanilla JS](usage-with-js.md)\n+ - [Creating Custom Driver](custom-driver.md)\n+ - [Using Multiple Schemas](multiple-schemas.md)\n+- Example Integrations\n+ - [Express + MongoDB + TypeScript](https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-ts)\n+ - [Nest + MySQL + TypeScript](https://github.com/mikro-orm/mikro-orm-examples/tree/master/nest)\n+ - [RealWorld example app (Nest + MySQL)](https://github.com/mikro-orm/nestjs-realworld-example-app)\n+ - [Express + MongoDB + JavaScript](https://github.com/mikro-orm/mikro-orm-examples/tree/master/express-js)\n+ - [GraphQL + PostgreSQL + Typescript](https://github.com/driescroons/mikro-orm-graphql-example)\n+ - [Inversify + PostgreSQL](https://github.com/PodaruDragos/inversify-example-app)\n+ - [NextJS + MySQL](https://github.com/jonahallibone/mikro-orm-nextjs)\n", "inheritance-mapping.md": "@@ -0,0 +1,228 @@\n+---\n+title: Inheritance Mapping\n+---\n+\n+## Mapped Superclasses\n+\n+A mapped superclass is an abstract or concrete class that provides persistent entity state and \n+mapping information for its subclasses, but which is not itself an entity. Typically, the purpose \n+of such a mapped superclass is to define state and mapping information that is common to multiple \n+entity classes.\n+\n+Mapped superclasses, just as regular, non-mapped classes, can appear in the middle of an otherwise \n+mapped inheritance hierarchy (through Single Table Inheritance).\n+\n+> A mapped superclass cannot be an entity, it is not query-able and persistent relationships defined \n+> by a mapped superclass must be unidirectional (with an owning side only). 
This means that One-To-Many \n+> associations are not possible on a mapped superclass at all. Furthermore Many-To-Many associations \n+> are only possible if the mapped superclass is only used in exactly one entity at the moment. For \n+> further support of inheritance, the single table inheritance features have to be used.\n+\n+```typescript\n+// do not use @Entity decorator on base classes (mapped superclasses)\n+// we can also use @Entity({ abstract: true })\n+export abstract class Person {\n+\n+ @Property()\n+ mapped1!: number;\n+\n+ @Property()\n+ mapped2!: string;\n+ \n+ @OneToOne()\n+ toothbrush!: Toothbrush;\n+\n+ // ... more fields and methods\n+}\n+\n+@Entity()\n+export class Employee extends Person {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property()\n+ name!: string;\n+\n+ // ... more fields and methods\n+\n+}\n+\n+@Entity()\n+export class Toothbrush {\n+ \n+ @PrimaryKey()\n+ id!: number;\n+\n+ // ... more fields and methods\n+\n+}\n+```\n+\n+The DDL for the corresponding database schema would look something like this (this is for SQLite):\n+\n+```sql\n+create table `employee` (\n+ `id` int unsigned not null auto_increment primary key,\n+ `name` varchar(255) not null, `mapped1` integer not null,\n+ `mapped2` varchar(255) not null,\n+ `toothbrush_id` integer not null\n+);\n+```\n+\n+As you can see from this DDL snippet, there is only a single table for the entity \n+subclass. All the mappings from the mapped superclass were inherited to the subclass \n+as if they had been defined on that class directly.\n+\n+## Single Table Inheritance\n+\n+> Support for STI was added in version 4.0\n+\n+[Single Table Inheritance](https://martinfowler.com/eaaCatalog/singleTableInheritance.html) \n+is an inheritance mapping strategy where all classes of a hierarchy are mapped to a single \n+database table. In order to distinguish which row represents which type in the hierarchy \n+a so-called discriminator column is used.\n+\n+```typescript\n+@Entity({\n+ discriminatorColumn: 'discr',\n+ discriminatorMap: { person: 'Person', employee: 'Employee' },\n+})\n+export class Person {\n+ // ...\n+}\n+\n+@Entity()\n+export class Employee extends Person {\n+ // ...\n+}\n+```\n+\n+Things to note:\n+\n+- The `discriminatorColumn` option must be specified on the topmost class that is \n+ part of the mapped entity hierarchy.\n+- The `discriminatorMap` specifies which values of the discriminator column identify \n+ a row as being of a certain type. In the case above a value of `person` identifies\n+ a row as being of type `Person` and `employee` identifies a row as being of type \n+ `Employee`.\n+- All entity classes that are part of the mapped entity hierarchy (including the topmost \n+ class) should be specified in the `discriminatorMap`. In the case above `Person` class\n+ included.\n+- We can use abstract class as the root entity - then the root class should not be part\n+ of the discriminator map\n+- If no discriminator map is provided, then the map is generated automatically. \n+ The automatically generated discriminator map contains the table names that would be\n+ otherwise used in case of regular entities. \n+\n+### Using `discriminatorValue` instead of `discriminatorMap`\n+\n+As noted above, the discriminator map can be auto-generated. In that case, we might\n+want to control the tokens that will be used in the map. 
To do so, we can use \n+`discriminatorValue` on the child entities:\n+\n+```ts\n+@Entity({\n+ discriminatorColumn: 'discr',\n+ discriminatorValue: 'person',\n+})\n+export class Person {\n+ // ...\n+}\n+\n+@Entity({\n+ discriminatorValue: 'employee',\n+})\n+export class Employee extends Person {\n+ // ...\n+}\n+```\n+\n+### Explicit discriminator column\n+\n+The `discriminatorColumn` specifies the name of a special column that will be used to\n+define what type of class a given row should be represented with. It will be defined \n+automatically for you and it will stay hidden (it won't be hydrated as a regular property). \n+\n+On the other hand, it is perfectly fine to define the column explicitly. Doing so, \n+you will be able to:\n+\n+- query by the type, e.g. `em.find(Person, { type: { $ne: 'employee' } })`\n+- have the column be part of the serialized response\n+\n+The following example shows how we can define the discriminator explicitly, as well\n+as a version where the root entity is an abstract class.\n+\n+```ts\n+@Entity({\n+ discriminatorColumn: 'type',\n+ discriminatorMap: { person: 'Person', employee: 'Employee' },\n+})\n+export abstract class BasePerson {\n+\n+ @Enum()\n+ type!: 'person' | 'employee';\n+\n+}\n+\n+@Entity()\n+export class Person extends BasePerson {\n+ // ...\n+}\n+\n+@Entity()\n+export class Employee extends Person {\n+ // ...\n+}\n+```\n+\n+If we want to use `discriminatorValue` with abstract entities, we need to mark\n+the entity as `abstract: true` so it can be skipped from the discriminator map:\n+\n+```ts\n+@Entity({\n+ discriminatorColumn: 'type',\n+ abstract: true,\n+})\n+export abstract class BasePerson {\n+\n+ @Enum()\n+ type!: 'person' | 'employee';\n+\n+}\n+\n+@Entity({ discriminatorValue: 'person' })\n+export class Person extends BasePerson {\n+ // ...\n+}\n+\n+@Entity({ discriminatorValue: 'employee' })\n+export class Employee extends Person {\n+ // ...\n+}\n+```\n+\n+### Design-time considerations\n+\n+This mapping approach works well when the type hierarchy is fairly simple and stable. \n+Adding a new type to the hierarchy and adding fields to existing supertypes simply \n+involves adding new columns to the table, though in large deployments this may have \n+an adverse impact on the index and column layout inside the database.\n+\n+### Performance impact\n+\n+This strategy is very efficient for querying across all types in the hierarchy or \n+for specific types. No table joins are required, only a WHERE clause listing the \n+type identifiers. In particular, relationships involving types that employ this \n+mapping strategy are very performant.\n+\n+### SQL Schema considerations\n+\n+For Single-Table-Inheritance to work in scenarios where you are using either a legacy \n+database schema or a self-written database schema, you have to make sure that all \n+columns that are not in the root entity but in any of the different sub-entities \n+allow null values. 
Columns that have NOT NULL constraints have to be on the \n+root entity of the single-table inheritance hierarchy.\n+\n+> This part of documentation is highly inspired by [doctrine docs](https://www.doctrine-project.org/projects/doctrine-orm/en/latest/reference/inheritance-mapping.html)\n+> as the behaviour here is pretty much the same.\n", "installation.md": "@@ -0,0 +1,271 @@\n+---\n+title: Installation & Usage\n+---\n+\n+First install the module via `yarn` or `npm` and do not forget to install the \n+driver package as well:\n+\n+> Since v4, you should install the driver package, but not the db connector itself,\n+> e.g. install `@mikro-orm/sqlite`, but not `sqlite3` as that is already included\n+> in the driver package.\n+\n+```sh\n+yarn add @mikro-orm/core @mikro-orm/mongodb # for mongo\n+yarn add @mikro-orm/core @mikro-orm/mysql # for mysql/mariadb\n+yarn add @mikro-orm/core @mikro-orm/mariadb # for mysql/mariadb\n+yarn add @mikro-orm/core @mikro-orm/postgresql # for postgresql\n+yarn add @mikro-orm/core @mikro-orm/sqlite # for sqlite\n+```\n+\n+or\n+\n+```sh\n+npm i -s @mikro-orm/core @mikro-orm/mongodb # for mongo\n+npm i -s @mikro-orm/core @mikro-orm/mysql # for mysql/mariadb\n+npm i -s @mikro-orm/core @mikro-orm/mariadb # for mysql/mariadb\n+npm i -s @mikro-orm/core @mikro-orm/postgresql # for postgresql\n+npm i -s @mikro-orm/core @mikro-orm/sqlite # for sqlite\n+```\n+\n+Next you will need to enable support for [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html)\n+as well as `esModuleInterop` in `tsconfig.json` via:\n+\n+```json\n+\"experimentalDecorators\": true,\n+\"emitDecoratorMetadata\": true,\n+\"esModuleInterop\": true,\n+```\n+\n+Then call `MikroORM.init` as part of bootstrapping your app:\n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: [Author, Book, BookTag],\n+ dbName: 'my-db-name',\n+ type: 'mongo', // one of `mongo` | `mysql` | `mariadb` | `postgresql` | `sqlite`\n+ clientUrl: '...', // defaults to 'mongodb://localhost:27017' for mongodb driver\n+});\n+console.log(orm.em); // access EntityManager via `em` property\n+```\n+\n+> Read more about all the possible configuration options in [Advanced Configuration](configuration.md) section.\n+\n+We can also provide paths where you store your entities via `entities` array. Internally\n+it uses [`globby`](https://github.com/sindresorhus/globby) so we can use \n+[globbing patterns](https://github.com/sindresorhus/globby#globbing-patterns), \n+including negative globs. \n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: ['./dist/app/**/entities'],\n+ // ...\n+});\n+```\n+\n+If you are experiencing problems with folder based discovery, try using `mikro-orm debug`\n+CLI command to check what paths are actually being used.\n+\n+> Since v4, you can also use file globs, like `./dist/app/**/entities/*.entity.js`.\n+\n+> You can pass additional options to the underlying driver (e.g. `mysql2`) via `driverOptions`. \n+> The object will be deeply merged, overriding all internally used options.\n+\n+## Possible issues with circular dependencies\n+\n+Your entities will most probably contain circular dependencies (e.g. if you use bi-directional \n+relationship). 
While this is fine, there might be issues caused by wrong order of entities \n+during discovery, especially when you are using the folder based way.\n+\n+The errors caused by circular dependencies are usually similar to this one:\n+\n+```\n+TypeError: Cannot read property 'name' of undefined\n+ at Function.className (/path/to/project/node_modules/mikro-orm/dist/utils/Utils.js:253:28)\n+ at TsMorphMetadataProvider.extractType (/path/to/project/node_modules/mikro-orm/dist/metadata/TsMorphMetadataProvider.js:37:34)\n+ at TsMorphMetadataProvider.initProperties (/path/to/project/node_modules/mikro-orm/dist/metadata/TsMorphMetadataProvider.js:25:31)\n+ at TsMorphMetadataProvider.loadEntityMetadata (/path/to/project/node_modules/mikro-orm/dist/metadata/TsMorphMetadataProvider.js:16:9)\n+ at MetadataDiscovery.discoverEntity (/path/to/project/node_modules/mikro-orm/dist/metadata/MetadataDiscovery.js:109:9)\n+ at MetadataDiscovery.discoverDirectory (/path/to/project/node_modules/mikro-orm/dist/metadata/MetadataDiscovery.js:80:13)\n+ at Function.runSerial (/path/to/project/node_modules/mikro-orm/dist/utils/Utils.js:303:22)\n+ at MetadataDiscovery.findEntities (/path/to/project/node_modules/mikro-orm/dist/metadata/MetadataDiscovery.js:56:13)\n+ at MetadataDiscovery.discover (/path/to/project/node_modules/mikro-orm/dist/metadata/MetadataDiscovery.js:30:9)\n+ at Function.init (/path/to/project/node_modules/mikro-orm/dist/MikroORM.js:45:24)\n+ at Function.handleSchemaCommand (/path/to/project/node_modules/mikro-orm/dist/cli/SchemaCommandFactory.js:51:21)\n+```\n+\n+If you encounter this, you have basically two options:\n+\n+- Use entity references in `entities` array to have control over the order of discovery. \n+ You might need to play with the actual order you provide here, or possibly with the \n+ order of import statements.\n+- Use strings instead of references (e.g. `@OneToMany('Book', 'author)`). The downside \n+ here is that you will lose the typechecking capabilities of the decorators. \n+\n+## Entity Discovery in TypeScript\n+\n+In v4 the default metadata provider is `ReflectMetadataProvider`. If you want to use\n+`ts-morph` based discovery (that reads actual TS types via the compiler API), you \n+need to install `@mikro-orm/reflection`.\n+\n+```typescript\n+import { TsMorphMetadataProvider } from '@mikro-orm/reflection';\n+\n+const orm = await MikroORM.init({\n+ metadataProvider: TsMorphMetadataProvider,\n+ // ...\n+});\n+```\n+\n+Read more about the differences in [Metadata Providers section](metadata-providers.md).\n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: ['./dist/entities/**/*.js'], // path to your JS entities (dist), relative to `baseDir`\n+ entitiesTs: ['./src/entities/**/*.ts'], // path to your TS entities (source), relative to `baseDir`\n+ // ...\n+});\n+```\n+\n+> It is important that `entities` will point to the compiled JS files, and `entitiesTs`\n+> will point to the TS source files. You should not mix those. \n+\n+> For `ts-morph` discovery to work in production, we need to deploy `.d.ts` declaration\n+> files. 
Be sure to enable `compilerOptions.declaration` in your `tsconfig.json`.\n+\n+You can also use different [metadata provider](metadata-providers.md) or even write custom one:\n+\n+- `ReflectMetadataProvider` that uses `reflect-metadata` instead of `ts-morph`\n+- `JavaScriptMetadataProvider` that allows you to manually provide the entity schema (mainly for Vanilla JS)\n+\n+> Using [`EntitySchema`](entity-schema.md) is another way to define your entities, which is better\n+> suited than using `JavaScriptMetadataProvider`.\n+\n+```typescript\n+const orm = await MikroORM.init({\n+ // default in v4, so not needed to specify explicitly\n+ metadataProvider: ReflectMetadataProvider,\n+ // ...\n+});\n+```\n+\n+## Request Context\n+\n+Then you will need to fork Entity Manager for each request so their identity maps will not \n+collide. To do so, use the `RequestContext` helper:\n+\n+```typescript\n+const app = express();\n+\n+app.use((req, res, next) => {\n+ RequestContext.create(orm.em, next);\n+});\n+```\n+\n+> If the `next` handler needs to be awaited (like in Koa), \n+> use `RequestContext.createAsync()` instead.\n+>\n+> ```typescript\n+> app.use((ctx, next) => RequestContext.createAsync(orm.em, next));\n+> ```\n+\n+More info about `RequestContext` is described [here](identity-map.md#request-context).\n+\n+## Setting up the Commandline Tool\n+\n+MikroORM ships with a number of command line tools that are very helpful during development, \n+like `SchemaGenerator` and `EntityGenerator`. You can call this command from the NPM binary \n+directory or use `npx`:\n+\n+> To work with the CLI, first install `@mikro-orm/cli` package.\n+\n+```sh\n+$ node node_modules/.bin/mikro-orm\n+$ npx mikro-orm\n+\n+# or when installed globally\n+$ mikro-orm\n+```\n+\n+For CLI to be able to access your database, you will need to create `mikro-orm.config.js` file that \n+exports your ORM configuration. TypeScript is also supported, just enable `useTsNode` flag in your\n+`package.json` file. There you can also set up array of possible paths to `mikro-orm.config` file,\n+as well as use different file name:\n+\n+> Do not forget to install `ts-node` when enabling `useTsNode` flag.\n+\n+MikroORM will always try to load the first available config file, based on the \n+order in `configPaths`. This means that if you specify the first item as the TS \n+config, but you do not have `ts-node` enabled and installed, it will fail to \n+load it.\n+\n+```json title=\"./package.json\"\n+{\n+ \"name\": \"your-app\",\n+ \"dependencies\": { ... },\n+ \"mikro-orm\": {\n+ \"useTsNode\": true,\n+ \"configPaths\": [\n+ \"./src/mikro-orm.config.ts\",\n+ \"./dist/mikro-orm.config.js\"\n+ ]\n+ }\n+}\n+```\n+\n+```typescript title=\"./src/mikro-orm.config.ts\"\n+export default {\n+ entities: [Author, Book, BookTag],\n+ dbName: 'my-db-name',\n+ type: 'mongo', // one of `mongo` | `mysql` | `mariadb` | `postgresql` | `sqlite`\n+};\n+```\n+\n+Once you have the CLI config properly set up, you can omit the `MikroORM.init()` options\n+parameter and the CLI config will be automatically used. \n+\n+> You can also use different names for this file, simply rename it in the `configPaths` array\n+> your in `package.json`. You can also use `MIKRO_ORM_CLI` environment variable with the path\n+> to override `configPaths` value.\n+\n+Now you should be able to start using the CLI. 
All available commands are listed in the CLI help:\n+\n+```sh\n+Usage: mikro-orm <command> [options]\n+\n+Commands:\n+ mikro-orm cache:clear Clear metadata cache\n+ mikro-orm cache:generate Generate metadata cache for production\n+ mikro-orm generate-entities Generate entities based on current database\n+ schema\n+ mikro-orm database:import <file> Imports the SQL file to the database\n+ mikro-orm schema:create Create database schema based on current\n+ metadata\n+ mikro-orm schema:drop Drop database schema based on current\n+ metadata\n+ mikro-orm schema:update Update database schema based on current\n+ metadata\n+ mikro-orm migration:create Create new migration with current schema\n+ diff\n+ mikro-orm migration:up Migrate up to the latest version\n+ mikro-orm migration:down Migrate one step down\n+ mikro-orm migration:list List all executed migrations\n+ mikro-orm migration:pending List all pending migrations\n+ mikro-orm debug Debug CLI configuration\n+\n+Options:\n+ -v, --version Show version number [boolean]\n+ -h, --help Show help [boolean]\n+\n+Examples:\n+ mikro-orm schema:update --run Runs schema synchronization\n+```\n+\n+To verify your setup, you can use the `mikro-orm debug` command.\n+\n+> When you have CLI config properly set up, you can omit the `options` parameter\n+> when calling `MikroORM.init()`.\n+\n+> Note: When importing a dump file you need `multipleStatements: true` in your\n+> configuration. Please check the configuration documentation for more information.\n+\n+Now you can start [defining your entities](defining-entities.md).\n", "lifecycle-hooks.md": "@@ -0,0 +1,218 @@\n+---\n+title: Lifecycle Hooks and EventSubscriber\n+sidebar_label: Hooks and Events\n+---\n+\n+There are two ways to hook to the lifecycle of an entity: \n+\n+- **Lifecycle hooks** are methods defined on the entity prototype.\n+- **EventSubscriber**s are classes that can be used to hook to multiple entities\n+ or when you do not want to have the method present on the entity prototype.\n+\n+> Hooks are internally executed the same way as subscribers.\n+\n+> Hooks are executed before subscribers.\n+\n+## Hooks\n+\n+You can use lifecycle hooks to run some code when an entity gets persisted. You can mark any of the\n+entity methods with them, and you can also mark multiple methods with the same hook.\n+\n+All hooks support async methods with one exception - `@OnInit`.\n+\n+- `@OnInit` is fired when a new instance of the entity is created, either manually via `em.create()`, or \n+automatically when new entities are loaded from the database\n+\n+- `@BeforeCreate()` and `@BeforeUpdate()` are fired right before we persist the entity in the database\n+\n+- `@AfterCreate()` and `@AfterUpdate()` are fired right after the entity is updated in the database and \n+merged to the identity map. From this event on, the entity will have a reference to `EntityManager` and will be \n+able to call the `entity.init()` method (including all entity references and collections).\n+\n+- `@BeforeDelete()` is fired right before we delete the record from the database. It is fired only when\n+removing an entity or entity reference, not when deleting records by query. \n+\n+- `@AfterDelete()` is fired right after the record gets deleted from the database and it is unset from \n+the identity map.\n+\n+> `@OnInit` is not fired when you create the entity manually via its constructor (`new MyEntity()`)\n+\n+## Limitations of lifecycle hooks\n+\n+Hooks are executed inside the commit action of the unit of work, after all change \n+sets are computed. 
This means that it is not possible to create new entities as\n+usual from inside the hook. Calling `em.flush()` from hooks will result in \n+validation error. Calling `em.persist()` can result in undefined behaviour like\n+locking errors. \n+\n+> The **internal** instance of `EntityManager` accessible under `wrap(this, true).__em` is \n+> not meant for public usage. \n+\n+## EventSubscriber\n+\n+Use `EventSubscriber` to hook to multiple entities or if you do not want to pollute\n+the entity prototype. All methods are optional, if you omit the `getSubscribedEntities()`\n+method, it means you are subscribing to all entities.\n+\n+You can either register the subscribers manually in the ORM configuration (via \n+`subscribers` array where you put the instance):\n+\n+```typescript\n+MikroORM.init({\n+ subscribers: [new AuthorSubscriber()],\n+});\n+```\n+\n+Or use `@Subscriber()` decorator - keep in mind that you need to make sure the file gets \n+loaded in order to make this decorator registration work (e.g. you import that file \n+explicitly somewhere).\n+\n+```typescript\n+import { EntityName, EventArgs, EventSubscriber, Subscriber } from '@mikro-orm/core';\n+\n+@Subscriber()\n+export class AuthorSubscriber implements EventSubscriber<Author> {\n+\n+ getSubscribedEntities(): EntityName<Author2>[] {\n+ return [Author2];\n+ }\n+\n+ async afterCreate(args: EventArgs<Author2>): Promise<void> {\n+ // ...\n+ }\n+\n+ async afterUpdate(args: EventArgs<Author2>): Promise<void> {\n+ // ... \n+ }\n+\n+}\n+```\n+\n+Another example, where we register to all the events and all entities: \n+\n+```typescript\n+import { EventArgs, EventSubscriber, Subscriber } from '@mikro-orm/core';\n+\n+@Subscriber()\n+export class EverythingSubscriber implements EventSubscriber {\n+\n+ async beforeCreate<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async afterCreate<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async beforeUpdate<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async afterUpdate<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async beforeDelete<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async afterDelete<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async beforeFlush<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async onFlush<T>(args: EventArgs<T>): Promise<void> { ... }\n+ async afterFlush<T>(args: EventArgs<T>): Promise<void> { ... }\n+ onInit<T>(args: EventArgs<T>): void { ... }\n+\n+}\n+```\n+\n+## EventArgs\n+\n+As a parameter to the hook method we get `EventArgs` instance. It will always contain\n+reference to the current `EntityManager` and the particular entity. 
Events fired\n+from `UnitOfWork` during flush operation also contain the `ChangeSet` object.\n+\n+```typescript\n+interface EventArgs<T> {\n+ entity: T;\n+ em: EntityManager;\n+ changeSet?: ChangeSet<T>;\n+}\n+\n+interface ChangeSet<T> {\n+ name: string; // entity name\n+ collection: string; // db table name\n+ type: ChangeSetType; // type of operation\n+ entity: T; // up to date entity instance\n+ payload: EntityData<T>; // changes that will be used to build the update query\n+ persisted: boolean; // whether the changeset was already persisted/executed\n+ originalEntity?: EntityData<T>; // snapshot of the entity when it was loaded from db\n+}\n+\n+enum ChangeSetType {\n+ CREATE = 'create',\n+ UPDATE = 'update',\n+ DELETE = 'delete',\n+}\n+```\n+\n+## Flush events\n+\n+There is a special kind of events executed during the commit phase (flush operation).\n+They are executed before, during and after the flush, and they are not bound to any\n+entity in particular. \n+\n+- `beforeFlush` is executed before change sets are computed, this is the only\n+ event where it is safe to persist new entities. \n+- `onFlush` is executed after the change sets are computed.\n+- `afterFlush` is executed as the last step just before the `flush` call resolves.\n+ it will be executed even if there are no changes to be flushed. \n+\n+Flush event args will not contain any entity instance, as they are entity agnostic.\n+They do contain additional reference to the `UnitOfWork` instance.\n+\n+```typescript\n+interface FlushEventArgs extends Omit<EventArgs<unknown>, 'entity'> {\n+ uow?: UnitOfWork;\n+}\n+``` \n+\n+> Flush events are entity agnostic, specifying `getSubscribedEntities()` method\n+> will not have any effect for those. They are fired only once per the `flush` \n+> operation.\n+\n+### Getting the changes from UnitOfWork\n+\n+You can observe all the changes that are part of given UnitOfWork via those methods:\n+\n+```typescript\n+UnitOfWork.getChangeSets(): ChangeSet<AnyEntity>[];\n+UnitOfWork.getOriginalEntityData(): Map<string, EntityData<AnyEntity>>;\n+UnitOfWork.getPersistStack(): Set<AnyEntity>;\n+UnitOfWork.getRemoveStack(): Set<AnyEntity>;\n+UnitOfWork.getCollectionUpdates(): Collection<AnyEntity>[];\n+UnitOfWork.getExtraUpdates(): Set<[AnyEntity, string, (AnyEntity | Reference<AnyEntity>)]>;\n+```\n+\n+### Using onFlush event\n+\n+In following example we have 2 entities: `FooBar` and `FooBaz`, connected via \n+M:1 relation. Our subscriber will automatically create new `FooBaz` entity and \n+connect it to the `FooBar` when we detect it in the change sets.\n+\n+We first use `uow.getChangeSets()` method to look up the change set of entity\n+we are interested in. After we create the `FooBaz` instance and link it with \n+`FooBar`, we need to do two things:\n+\n+1. Call `uow.computeChangeSet(baz)` to compute the change set of newly created \n+ `FooBaz` entity\n+2. 
Call `uow.recomputeSingleChangeSet(cs.entity)` to recalculate the existing \n+ change set of the `FooBar` entity.\n+\n+```typescript\n+@Subscriber()\n+export class FooBarSubscriber implements EventSubscriber {\n+\n+ async onFlush(args: FlushEventArgs): Promise<void> {\n+ const changeSets = args.uow.getChangeSets();\n+ const cs = changeSets.find(cs => cs.type === ChangeSetType.CREATE && cs.entity instanceof FooBar);\n+\n+ if (cs) {\n+ const baz = new FooBaz();\n+ baz.name = 'dynamic';\n+ cs.entity.baz = baz;\n+ args.uow.computeChangeSet(baz);\n+ args.uow.recomputeSingleChangeSet(cs.entity);\n+ }\n+ }\n+\n+}\n+\n+const bar = new FooBar();\n+bar.name = 'bar';\n+await em.persistAndFlush(bar);\n+```\n", "loading-strategies.md": "@@ -0,0 +1,85 @@\n+---\n+title: Relationship Loading Strategies\n+sidebar_label: Loading Strategies\n+---\n+\n+> SQL only feature\n+\n+Controls how relationships get loaded when querying. By default, populated relationships\n+are loaded via the `select-in` strategy. This strategy issues one additional `SELECT`\n+statement per relation being loaded.\n+\n+The loading strategy can be specified both at mapping time and when loading entities.\n+\n+For example, given the following entities:\n+\n+```typescript\n+import { Entity, LoadStrategy, OneToMany, ManyToOne } from '@mikro-orm/core';\n+\n+@Entity()\n+export class Author {\n+ @OneToMany(() => Book, b => b.author)\n+ books = new Collection<Book>(this);\n+}\n+\n+@Entity()\n+export class Book {\n+ @ManyToOne()\n+ author: Author;\n+}\n+```\n+\n+The following will issue two SQL statements.\n+One to load the author and another to load all the books belonging to that author:\n+\n+```typescript\n+const author = await orm.em.findOne(Author, 1, ['books']);\n+```\n+\n+If we update the `Author.books` mapping to the following:\n+\n+```typescript\n+import { Entity, LoadStrategy, OneToMany } from '@mikro-orm/core';\n+\n+@Entity()\n+export class Author {\n+ @OneToMany({\n+ entity: () => Book,\n+ mappedBy: b => b.author,\n+ strategy: LoadStrategy.JOINED,\n+ })\n+ books = new Collection<Book>(this);\n+}\n+```\n+\n+The following will issue **one** SQL statement:\n+\n+```typescript\n+const author = await orm.em.findOne(Author, 1, ['books']);\n+```\n+\n+You can also specify the load strategy as needed. This will override whatever strategy is declared in the mapping.\n+This also works for nested populates:\n+\n+```typescript\n+// one level\n+const author = await orm.em.findOne(Author, 1, { populate: { books: LoadStrategy.JOINED } });\n+\n+// two levels\n+const author = await orm.em.findOne(Author, 1, { populate: {\n+ books: [LoadStrategy.JOINED, { publisher: LoadStrategy.JOINED }]\n+} });\n+```\n+\n+## Changing the loading strategy globally\n+\n+You can use `loadStrategy` option in the ORM config:\n+\n+```ts\n+MikroORM.init({\n+ loadStrategy: LoadStrategy.JOINED,\n+});\n+```\n+\n+This value will be used as the default, specifying the loading strategy on \n+property level has precedence, as well as specifying it in the `FindOptions`.\n", "metadata-cache.md": "@@ -0,0 +1,87 @@\n+---\n+title: Metadata Cache\n+---\n+\n+> In v4 you need to explicitly install `@mikro-orm/reflection` to use `TsMorphMetadataProvider`.\n+\n+Under the hood, `MikroORM` uses [`ts-morph`](https://github.com/dsherret/ts-morph) to read \n+TypeScript source files of all entities to be able to detect all types. 
Thanks to this, \n+defining the type is enough for runtime validation.\n+\n+If you use folder-based discovery, you should specify paths to\n+the compiled entities via `entities` as well as paths to the TS source files of\n+those entities via `entitiesTs`. When you run the ORM via `ts-node`, the latter\n+will be used automatically, or if you explicitly pass `tsNode: true` in the config.\n+\n+After the discovery process ends, all metadata will be cached. By default, `FileCacheAdapter`\n+will be used to store the cache inside the `./temp` folder in JSON files. \n+\n+## Automatic Invalidation\n+\n+Entity metadata are cached together with the modified time of the source file, and every time\n+the cache is requested, it first checks whether the cache is still valid. This way you can forget \n+about the caching mechanism most of the time.\n+\n+One case where you can end up needing to wipe the cache manually is when you work within a \n+git branch where the contents of the entities folder differ. \n+\n+## Disabling Metadata Cache\n+\n+You can disable caching via:\n+\n+```typescript\n+await MikroORM.init({\n+ cache: { enabled: false },\n+ // ...\n+});\n+```\n+\n+## Pretty Printing\n+\n+By default, cached metadata will be a one-line JSON string. You can force pretty printing it:\n+\n+```typescript\n+await MikroORM.init({\n+ cache: { pretty: true },\n+ // ...\n+});\n+```\n+\n+## Using Different temp Folder\n+\n+You can set the temp folder via:\n+\n+```typescript\n+await MikroORM.init({\n+ cache: { options: { cacheDir: '...' } },\n+ // ...\n+});\n+```\n+\n+## Providing Custom Cache Adapter\n+\n+You can also implement your own cache adapter, for example to store the cache in redis. \n+To do so, just implement the simple `CacheAdapter` interface:\n+\n+```typescript\n+export interface CacheAdapter {\n+\n+ get(name: string): any;\n+\n+ set(name: string, data: any, origin: string): void;\n+\n+}\n+```\n+\n+```typescript\n+export class RedisCacheAdapter implements CacheAdapter { ... }\n+```\n+\n+And provide the implementation in the `cache.adapter` option:\n+\n+```typescript\n+await MikroORM.init({\n+ cache: { adapter: RedisCacheAdapter, options: { ... } },\n+ // ...\n+});\n+```\n", "metadata-providers.md": "@@ -0,0 +1,156 @@\n+---\n+title: Metadata Providers\n+---\n+\n+As part of the entity discovery process, MikroORM uses a so-called `MetadataProvider` to get the necessary\n+type information about your entities' properties. There are 3 built-in metadata providers you can \n+use:\n+\n+> You can also implement a custom metadata provider by extending the abstract `MetadataProvider` class.\n+\n+## TsMorphMetadataProvider\n+\n+With `TsMorphMetadataProvider` MikroORM will use [`ts-morph`](https://github.com/dsherret/ts-morph) to read \n+TypeScript source files of all entities to be able to detect all types. Thanks to this, \n+defining the type is enough for runtime validation.\n+\n+To use it, first install the `@mikro-orm/reflection` package.\n+\n+```typescript\n+import { TsMorphMetadataProvider } from '@mikro-orm/reflection';\n+\n+await MikroORM.init({\n+ metadataProvider: TsMorphMetadataProvider,\n+ // ...\n+});\n+```\n+\n+If you use folder-based discovery, you should specify paths to\n+the compiled entities via `entities` as well as paths to the TS source files of\n+those entities via `entitiesTs`. 
When you run the ORM via `ts-node`, the latter\n+will be used automatically, or if you explicitly pass `tsNode: true` in the config.\n+\n+> When running via `node`, `.d.ts` files are used to obtain the type, so we \n+> need to ship them in the production build. TS source files are no longer \n+> needed (since v4). Be sure to enable `compilerOptions.declaration` in your\n+> `tsconfig.json`.\n+\n+After the discovery process ends, all [metadata will be cached](metadata-cache.md). By default, \n+`FileCacheAdapter` will be used to store the cache inside `./temp` folder in JSON files. \n+\n+> You can generate production cache via CLI command `mikro-orm cache:generate`.\n+\n+> You can implement custom cache adapter by implementing `CacheAdapter` interface.\n+\n+## ReflectMetadataProvider\n+\n+`ReflectMetadataProvider` uses `reflect-metadata` module to read the type from decorator \n+metadata exported by TypeScript compiler. \n+\n+You will need to install `reflect-metadata` module and import at the top of your app's \n+bootstrap script (e.g. `main.ts` or `app.ts`). \n+\n+```typescript\n+import 'reflect-metadata';\n+```\n+\n+Next step is to enable `emitDecoratorMetadata` flag in your `tsconfig.json`.\n+\n+> As this approach does not have performance impact, metadata caching is not really necessary. \n+\n+```typescript\n+await MikroORM.init({\n+ metadataProvider: ReflectMetadataProvider,\n+ // ...\n+});\n+```\n+\n+### Limitations and requirements\n+\n+#### Explicit types\n+\n+Type inference is not supported, you need to always explicitly specify the type:\n+\n+```typescript\n+@Property()\n+createdAt: Date = new Date();\n+```\n+\n+#### Collection properties and Identified references\n+\n+You need to provide target entity type in `@OneToMany` and `@ManyToMany` decorators:\n+\n+```typescript\n+@OneToMany(() => Book, b => b.author)\n+books = new Collection<Book>(this);\n+\n+@ManyToOne(() => Publisher, { wrappedReference: true })\n+publisher!: IdentifiedReference<Publisher>;\n+```\n+\n+#### Optional properties\n+\n+Reading property nullability is not supported, you need to explicitly set `nullable` attribute:\n+\n+```typescript\n+@Property({ nullable: true })\n+prop?: string;\n+```\n+\n+#### Enums\n+\n+By default, enum is considered as numeric type. For string enums, you need to explicitly \n+provide one of:\n+\n+- reference to the enum (which will force you to define the enum before defining the entity)\n+ ```typescript\n+ @Enum(() => UserRole)\n+ role: UserRole;\n+ ```\n+- name of the enum (if it is present in the same file)\n+ ```typescript\n+ @Enum({ type: 'UserRole' })\n+ role: UserRole;\n+ ```\n+- list of the enum items\n+ ```typescript\n+ @Enum({ items: ['a', 'b', 'c'] })\n+ role: UserRole;\n+ ```\n+\n+#### Circular dependencies\n+\n+Reading type of referenced entity in `@ManyToOne` and `@OneToOne` properties fails if there is \n+circular dependency. You will need to explicitly define the type in the decorator (preferably \n+via `entity: () => ...` callback).\n+\n+```typescript\n+@ManyToOne({ entity: () => Author })\n+author: Author;\n+``` \n+\n+> There can be recursion issues when you define multiple entities (with circular dependencies \n+> between each other) in single file. 
In that case, you might want to provide the type via decorator's\n+> `type` or `entity` attributes and set the TS property type to something else (like `any` or `object`).\n+\n+#### Additional typings might be required\n+\n+You might have to install additional typings, one example is use of `ObjectId` in MongoDB, \n+which requires `@types/mongodb` to be installed. \n+\n+## JavaScriptMetadataProvider\n+\n+> `JavaScriptMetadataProvider` is deprecated, [use `EntitySchema` instead](entity-schema.md).\n+\n+This provider should be used only if you are not using TypeScript at all and therefore you do \n+not use decorators to annotate your properties. It will require you to specify the whole schema \n+manually. \n+\n+```typescript\n+await MikroORM.init({\n+ metadataProvider: JavaScriptMetadataProvider,\n+ // ...\n+});\n+```\n+\n+You can read more about it in [Usage with JavaScript section](usage-with-js.md).\n", "migrations.md": "@@ -0,0 +1,194 @@\n+---\n+title: Migrations\n+---\n+\n+MikroORM has integrated support for migrations via [umzug](https://github.com/sequelize/umzug).\n+It allows you to generate migrations with current schema differences.\n+\n+By default, each migration will be all executed inside a transaction, and all of them will \n+be wrapped in one master transaction, so if one of them fails, everything will be rolled back. \n+\n+## Migration class\n+\n+Migrations are classes that extend Migration abstract class:\n+\n+```typescript\n+export class Migration20191019195930 extends Migration {\n+\n+ async up(): Promise<void> {\n+ this.addSql('select 1 + 1');\n+ }\n+\n+}\n+```\n+\n+To support undoing those changed, you can implement the `down` method, which throws an error by default. \n+\n+Migrations are by default wrapped in a transaction. You can override this behaviour on \n+per migration basis by implementing the `isTransactional(): boolean` method.\n+\n+`Configuration` object and driver instance are available in the `Migration` class context.\n+\n+You can execute queries in the migration via `Migration.execute()` method, which \n+will run queries in the same transaction as the rest of the migration. The \n+`Migration.addSql()` method also accepts instances of knex. Knex instance can be \n+accessed via `Migration.getKnex()`; \n+\n+## Initial migration\n+\n+If you want to start using migrations, and you already have the schema generated, \n+you can do so by creating so called initial migration:\n+\n+> Initial migration can be created only if there are no migrations previously\n+> generated or executed. \n+\n+```sh\n+npx mikro-orm migration:create --initial\n+```\n+\n+This will create the initial migration, containing the schema dump from \n+`schema:create` command. The migration will be automatically marked as executed. 
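\n+\n+For illustration only, a generated initial migration could look like this (the SQL is a made-up fragment, the real content is the full schema dump of your entities, and the `Migration` base class is assumed to come from `@mikro-orm/migrations`):\n+\n+```typescript\n+import { Migration } from '@mikro-orm/migrations';\n+\n+export class Migration20191019195930 extends Migration {\n+\n+ async up(): Promise<void> {\n+ this.addSql('create table `author` (`id` int unsigned not null auto_increment primary key, `name` varchar(255) not null) default character set utf8mb4;');\n+ }\n+\n+}\n+```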
\n+\n+## Configuration\n+\n+```typescript\n+await MikroORM.init({\n+ // default values:\n+ migrations: {\n+ tableName: 'mikro_orm_migrations', // name of database table with log of executed transactions\n+ path: './migrations', // path to the folder with migrations\n+ pattern: /^[\\w-]+\\d+\\.ts$/, // regex pattern for the migration files\n+ transactional: true, // wrap each migration in a transaction\n+ disableForeignKeys: true, // wrap statements with `set foreign_key_checks = 0` or equivalent\n+ allOrNothing: true, // wrap all migrations in master transaction\n+ dropTables: true, // allow to disable table dropping\n+ safe: false, // allow to disable table and column dropping\n+ emit: 'ts', // migration generation mode\n+ },\n+})\n+```\n+\n+## Using via CLI\n+\n+You can use it via CLI: \n+\n+```sh\n+npx mikro-orm migration:create # Create new migration with current schema diff\n+npx mikro-orm migration:up # Migrate up to the latest version\n+npx mikro-orm migration:down # Migrate one step down\n+npx mikro-orm migration:list # List all executed migrations\n+npx mikro-orm migration:pending # List all pending migrations\n+```\n+\n+For `migration:up` and `migration:down` commands you can specify `--from` (`-f`), `--to` (`-t`) \n+and `--only` (`-o`) options to run only a subset of migrations:\n+\n+```sh\n+npx mikro-orm migration:up --from 2019101911 --to 2019102117 # the same as above\n+npx mikro-orm migration:up --only 2019101923 # apply a single migration\n+npx mikro-orm migration:down --to 0 # migratee down all migrations\n+```\n+\n+> To run TS migration files, you will need to [enable `useTsNode` flag](installation.md) \n+> in your `package.json`.\n+\n+## Using the Migrator programmatically\n+\n+Or you can create a simple script where you initialize MikroORM like this:\n+\n+```typescript title=\"./migrate.ts\"\n+import { MikroORM } from '@mikro-orm/core';\n+\n+(async () => {\n+ const orm = await MikroORM.init({\n+ dbName: 'your-db-name',\n+ // ...\n+ });\n+\n+ const migrator = orm.getMigrator();\n+ await migrator.createMigration(); // creates file Migration20191019195930.ts\n+ await migrator.up(); // runs migrations up to the latest\n+ await migrator.up('up-to-name'); // runs migrations up to given version\n+ await migrator.down('down-to-name'); // runs migrations down to given version\n+ await migrator.down(); // migrates one step down\n+ await migrator.down({ to: 0 }); // migrates down to the first version\n+\n+ await orm.close(true);\n+})();\n+```\n+\n+Then run this script via `ts-node` (or compile it to plain JS and use `node`):\n+\n+```sh\n+$ ts-node migrate\n+```\n+\n+## Providing transaction context\n+\n+In some cases you might want to control the transaction context yourself:\n+\n+```ts\n+await orm.em.transactional(async em => {\n+ await migrator.up({ transaction: em.getTransactionContext() });\n+});\n+```\n+\n+## Importing migrations statically\n+\n+If you do not want to dynamically import a folder (e.g. 
when bundling your code with webpack) you can import migrations\n+directly.\n+\n+```typescript\n+import { Migration20191019195930 } from '../migrations/Migration20191019195930.ts';\n+\n+await MikroORM.init({\n+ migrations: {\n+ migrationsList: [\n+ {\n+ name: 'Migration20191019195930.ts',\n+ class: Migration20191019195930,\n+ },\n+ ],\n+ },\n+});\n+```\n+\n+With the help of (webpacks context module api)[https://webpack.js.org/guides/dependency-management/#context-module-api]\n+we can dynamically import the migrations making it possible to import all files in a folder.\n+\n+```typescript\n+import { basename } from 'path';\n+\n+const migrations = {};\n+\n+function importAll(r) {\n+ r.keys().forEach(\n+ (key) => (migrations[basename(key)] = Object.values(r(key))[0])\n+ );\n+}\n+\n+importAll(require.context('../migrations', false, /\\.ts$/));\n+\n+const migrationsList = Object.keys(migrations).map((migrationName) => ({\n+ name: migrationName,\n+ class: migrations[migrationName],\n+}));\n+\n+await MikroORM.init({\n+ migrations: {\n+ migrationsList,\n+ },\n+});\n+```\n+\n+## Limitations\n+\n+### MySQL\n+\n+There is no way to rollback DDL changes in MySQL. An implicit commit is forced for those \n+queries automatically, so transactions are not working as expected. \n+\n+- https://github.com/mikro-orm/mikro-orm/issues/217\n+- https://dev.mysql.com/doc/refman/5.7/en/implicit-commit.html\n+\n+[&larr; Back to table of contents](index.md#table-of-contents)\n", "multiple-schemas.md": "@@ -0,0 +1,37 @@\n+---\n+title: Using Multiple Schemas\n+---\n+\n+In MySQL and PostgreSQL is is possible to define your entities in multiple schemas. In MySQL \n+terminology, it is called database, but from implementation point of view, it is a schema. \n+\n+> To use multiple schemas, your connection needs to have access to all of them (multiple \n+> connections are not supported).\n+\n+All you need to do is simply define the table name including schema name in `collection` option:\n+\n+```typescript\n+@Entity({ tableName: 'first_schema.foo' })\n+export class Foo { ... }\n+\n+@Entity({ tableName: 'second_schema.bar' })\n+export class Bar { ... }\n+```\n+\n+Then use those entities as usual. Resulting SQL queries will use this `tableName` value as a \n+table name so as long as your connection has access to given schema, everything should work \n+as expected.\n+\n+You can also query for entity in specific schema via `EntityManager`, `EntityRepository` or \n+`QueryBuilder`:\n+\n+```typescript\n+const user = await em.findOne(User, { ... }, { schema: 'client-123' });\n+```\n+\n+To create entity in specific schema, you will need to use `QueryBuilder`:\n+\n+```typescript\n+const qb = em.createQueryBuilder(User);\n+await qb.insert({ email: '[email protected]' }).withSchema('client-123');\n+```\n", "naming-strategy.md": "@@ -0,0 +1,98 @@\n+---\n+title: Naming Strategy\n+---\n+\n+When mapping your entities to database tables and columns, their names will be defined by naming \n+strategy. There are 3 basic naming strategies you can choose from:\n+\n+- `UnderscoreNamingStrategy` - default of all SQL drivers\n+- `MongoNamingStrategy` - default of `MongoDriver`\n+- `EntityCaseNamingStrategy` - uses unchanged entity and property names\n+\n+You can override this when initializing ORM. 
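\n+\n+For example, to use the case-preserving strategy with an SQL driver (assuming `EntityCaseNamingStrategy` is exported from `@mikro-orm/core`):\n+\n+```typescript\n+import { EntityCaseNamingStrategy, MikroORM } from '@mikro-orm/core';\n+\n+const orm = await MikroORM.init({\n+ // keep entity and property names unchanged in the database\n+ namingStrategy: EntityCaseNamingStrategy,\n+ // ...\n+});\n+```\n+\n+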
You can also provide your own naming strategy, just \n+implement `NamingStrategy` interface and provide your implementation when bootstrapping ORM:\n+\n+```typescript\n+class YourCustomNamingStrategy implements NamingStrategy {\n+ ...\n+}\n+\n+const orm = await MikroORM.init({\n+ ...\n+ namingStrategy: YourCustomNamingStrategy,\n+ ...\n+});\n+```\n+\n+> You can also extend `AbstractNamingStrategy` which implements one method for you - `getClassName()`\n+> that is used to map entity file name to class name.\n+\n+## Naming Strategy in mongo driver\n+\n+`MongoNamingStrategy` will simply use all field names as they are defined. Collection names will\n+be translated into lower-cased dashed form:\n+\n+`MyCoolEntity` will be translated into `my-cool-entity` collection name.\n+\n+## Naming Strategy in SQL drivers\n+\n+`MySqlDriver` defaults to `UnderscoreNamingStrategy`, which means your all your database tables and\n+columns will be lower-cased and words divided by underscored:\n+\n+```sql\n+CREATE TABLE `author` (\n+ `id` int(11) unsigned NOT NULL AUTO_INCREMENT,\n+ `created_at` datetime(3) DEFAULT NULL,\n+ `updated_at` datetime(3) DEFAULT NULL,\n+ `terms_accepted` tinyint(1) DEFAULT NULL,\n+ `name` varchar(255) DEFAULT NULL,\n+ `email` varchar(255) DEFAULT NULL,\n+ `born` datetime DEFAULT NULL,\n+ `favourite_book_id` int(11) DEFAULT NULL,\n+ PRIMARY KEY (`id`)\n+) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n+```\n+\n+## NamingStrategy API\n+\n+#### `NamingStrategy.getClassName(file: string, separator?: string): string`\n+\n+Return a name of the class based on its file name.\n+\n+---\n+\n+#### `NamingStrategy.classToTableName(entityName: string): string`\n+\n+Return a table name for an entity class.\n+\n+---\n+\n+#### `NamingStrategy.propertyToColumnName(propertyName: string): string`\n+\n+Return a column name for a property.\n+\n+---\n+\n+#### `NamingStrategy.referenceColumnName(): string`\n+\n+Return the default reference column name.\n+\n+---\n+\n+#### `NamingStrategy.joinColumnName(propertyName: string): string`\n+\n+Return a join column name for a property.\n+\n+---\n+\n+#### `NamingStrategy.joinTableName(sourceEntity: string, targetEntity: string, propertyName: string): string`\n+\n+Return a join table name. This is used as default value for `pivotTable`. \n+\n+---\n+\n+#### `NamingStrategy.joinKeyColumnName(entityName: string, referencedColumnName?: string): string`\n+\n+Return the foreign key column name for the given parameters.\n+\n+---\n", "nested-populate.md": "@@ -0,0 +1,70 @@\n+---\n+title: Smart Nested Populate\n+---\n+\n+`MikroORM` is capable of loading large nested structures while maintaining good \n+performance, querying each database table only once. Imagine you have this nested \n+structure:\n+\n+- `Book` has one `Publisher` (M:1), one `Author` (M:1) and many `BookTag`s (M:N)\n+- `Publisher` has many `Test`s (M:N)\n+\n+When you use nested populate while querying all `BookTag`s, this is what happens in\n+the background:\n+\n+```typescript\n+const tags = await orm.em.findAll(BookTag, ['books.publisher.tests', 'books.author']);\n+console.log(tags[0].books[0].publisher.tests[0].name); // prints name of nested test\n+console.log(tags[0].books[0].author.name); // prints name of nested author\n+```\n+\n+1. Load all `BookTag`s\n+2. Load all `Book`s associated with previously loaded `BookTag`s\n+3. Load all `Publisher`s associated with previously loaded `Book`s\n+4. Load all `Test`s associated with previously loaded `Publisher`s\n+5. 
Load all `Author`s associated with previously loaded `Book`s\n+\n+> You can also populate all relationships by passing `populate: true`.\n+\n+For SQL drivers with pivot tables this means:\n+\n+```sql\n+SELECT `e0`.* FROM `book_tag` AS `e0`;\n+\n+SELECT `e0`.*, `e1`.`book_id`, `e1`.`book_tag_id`\n+ FROM `book` AS `e0` LEFT JOIN `book_to_book_tag` AS `e1` ON `e0`.`id` = `e1`.`book_id`\n+ WHERE `e1`.`book_tag_id` IN (?, ?, ?, ?, ?)\n+ ORDER BY `e1`.`id` ASC;\n+\n+SELECT `e0`.* FROM `publisher` AS `e0` WHERE `e0`.`id` IN (?, ?, ?);\n+\n+SELECT `e0`.*, `e1`.`test_id`, `e1`.`publisher_id`\n+ FROM `test` AS `e0` LEFT JOIN `publisher_to_test` AS `e1` ON `e0`.`id` = `e1`.`test_id`\n+ WHERE `e1`.`publisher_id` IN (?, ?, ?)\n+ ORDER BY `e1`.`id` ASC;\n+\n+SELECT `e0`.* FROM `author` AS `e0` WHERE `e0`.`id` IN (?);\n+```\n+\n+For mongo driver its even simpler as no pivot tables are involved:\n+\n+```typescript\n+db.getCollection(\"book-tag\").find({}).toArray();\n+db.getCollection(\"book\").find({\"tags\":{\"$in\":[...]}}).toArray();\n+db.getCollection(\"publisher\").find({\"_id\":{\"$in\":[...]}}).toArray();\n+db.getCollection(\"test\").find({\"_id\":{\"$in\":[...]}}).toArray();\n+db.getCollection(\"author\").find({\"_id\":{\"$in\":[...]}}).toArray();\n+```\n+\n+## Populating already loaded entities\n+\n+To populate existing entities, you can use `em.populate()`.\n+\n+```typescript\n+const authors = await orm.em.createQueryBuilder(Author).select('*').getResult();\n+await em.populate(authors, ['books.tags']);\n+\n+// now your Author entities will have `books` collections populated, \n+// as well as they will have their `tags` collections populated.\n+console.log(authors[0].books[0].tags[0]); // initialized BookTag\n+```\n", "propagation.md": "@@ -0,0 +1,51 @@\n+---\n+title: Propagation\n+---\n+\n+By default MikroORM will propagate all changes made to one side of bi-directional relations\n+to the other side, keeping them in sync. This works for all relations, including M:1 and 1:1.\n+As part of the discovery process, all M:1 and 1:1 properties are re-defined as getter/setter.\n+\n+```typescript\n+const author = new Author(...);\n+const book = new Book(...);\n+book.author = author;\n+console.log(author.books.contains(book)); // true\n+```\n+\n+> You can disable this behaviour via `propagateToOneOwner` option.\n+\n+## Propagation of Collection's add() and remove() operations\n+\n+When you use one of `Collection.add()` method, the item is added to given collection, \n+and this action is also propagated to its counterpart. \n+\n+```typescript\n+// one to many\n+const author = new Author(...);\n+const book = new Book(...);\n+\n+author.books.add(book);\n+console.log(book.author); // author will be set thanks to the propagation\n+```\n+\n+For M:N this works in both ways, either from owning side, or from inverse side. 
\n+\n+```typescript\n+// many to many works both from owning side and from inverse side\n+const book = new Book(...);\n+const tag = new BookTag(...);\n+\n+book.tags.add(tag);\n+console.log(tag.books.contains(book)); // true\n+\n+tag.books.add(book);\n+console.log(book.tags.contains(tag)); // true\n+``` \n+\n+> Collections on both sides have to be initialized, otherwise propagation won't work.\n+\n+> Although this propagation works also for M:N inverse side, you should always use owning\n+> side to manipulate the collection.\n+\n+Same applies for `Collection.remove()`.\n", "property-validation.md": "@@ -0,0 +1,51 @@\n+---\n+title: Property Validation\n+---\n+\n+> Since v4.0.3 the validation needs to be explicitly enabled via `validate: true`.\n+> It has performance implications and usually should not be needed, as long as\n+> you don't modify your entities via `Object.assign()`.\n+\n+`MirkoORM` will validate your properties before actual persisting happens. It will try to fix wrong \n+data types for you automatically. If automatic conversion fails, it will throw an error. You can \n+enable strict mode to disable this feature and let ORM throw errors instead. Validation is triggered \n+when persisting the entity. \n+\n+```typescript\n+// number instead of string will throw\n+const author = new Author('test', 'test');\n+wrap(author).assign({ name: 111, email: 222 });\n+await orm.em.persistAndFlush(author); // throws \"Validation error: trying to set Author.name of type 'string' to '111' of type 'number'\"\n+\n+// string date with unknown format will throw\n+wrap(author).assign(author, { name: '333', email: '444', born: 'asd' });\n+await orm.em.persistAndFlush(author); // throws \"Validation error: trying to set Author.born of type 'date' to 'asd' of type 'string'\"\n+\n+// string date with correct format will be auto-corrected\n+wrap(author).assign({ name: '333', email: '444', born: '2018-01-01' });\n+await orm.em.persistAndFlush(author);\n+console.log(author.born).toBe(true); // instance of Date\n+\n+// Date object will be ok\n+wrap(author).assign({ born: new Date() });\n+await orm.em.persistAndFlush(author);\n+console.log(author.born).toBe(true); // instance of Date\n+\n+// null will be ok\n+wrap(author).assign({ born: null });\n+await orm.em.persistAndFlush(author);\n+console.log(author.born); // null\n+\n+// string number with correct format will be auto-corrected\n+wrap(author).assign({ age: '21' });\n+await orm.em.persistAndFlush(author);\n+console.log(author.age); // number 21\n+\n+// string instead of number with will throw\n+wrap(author).assign({ age: 'asd' });\n+await orm.em.persistAndFlush(author); // throws \"Validation error: trying to set Author.age of type 'number' to 'asd' of type 'string'\"\n+wrap(author).assign({ age: new Date() });\n+await orm.em.persistAndFlush(author); // throws \"Validation error: trying to set Author.age of type 'number' to '2019-01-17T21:14:23.875Z' of type 'date'\"\n+wrap(author).assign({ age: false });\n+await orm.em.persistAndFlush(author); // throws \"Validation error: trying to set Author.age of type 'number' to 'false' of type 'boolean'\"\n+```\n", "query-builder-api.md": "@@ -0,0 +1,50 @@\n+---\n+title: Query Builder API\n+---\n+\n+`QueryBuilder` provides fluent interface with these methods:\n+\n+```typescript\n+select(fields: Field<T> | Field<T>[], distinct?: boolean): QueryBuilder;\n+addSelect(fields: string | string[]): QueryBuilder;\n+insert(data: any): QueryBuilder;\n+update(data: any): QueryBuilder;\n+delete(cond?: QBFilterQuery): 
QueryBuilder;\n+truncate(): QueryBuilder;\n+count(field?: string | string[], distinct?: boolean): QueryBuilder;\n+join(field: string, alias: string, cond?: QBFilterQuery, type?: 'leftJoin' | 'innerJoin' | 'pivotJoin', path?: string): QueryBuilder;\n+leftJoin(field: string, alias: string, cond?: QBFilterQuery): QueryBuilder;\n+withSubQuery(subQuery: KnexQueryBuilder, alias: string): QueryBuilder;\n+where(cond: QBFilterQuery<T>, operator?: keyof typeof GroupOperator): QueryBuilder;\n+where(cond: string, params?: any[], operator?: keyof typeof GroupOperator): QueryBuilder;\n+andWhere(cond: QBFilterQuery<T>): QueryBuilder;\n+andWhere(cond: string, params?: any[]): QueryBuilder;\n+orWhere(cond: QBFilterQuery<T>): QueryBuilder;\n+orWhere(cond: string, params?: any[]): QueryBuilder;\n+orderBy(orderBy: QueryOrderMap): QueryBuilder;\n+groupBy(fields: (string | keyof T) | (string | keyof T)[]): QueryBuilder;\n+having(cond?: QBFilterQuery | string, params?: any[]): QueryBuilder;\n+raw(sql: string): Raw;\n+limit(limit?: number, offset?: number): QueryBuilder;\n+offset(offset?: number): QueryBuilder;\n+withSchema(schema?: string): QueryBuilder;\n+setLockMode(mode?: LockMode): QueryBuilder;\n+setFlag(flag: QueryFlag): QueryBuilder;\n+unsetFlag(flag: QueryFlag): QueryBuilder;\n+getKnexQuery(): KnexQueryBuilder;\n+getQuery(): string;\n+getParams(): readonly Value[];\n+getAliasForJoinPath(path: string): string | undefined;\n+getNextAlias(prefix?: string): string;\n+execute<U = any>(method?: 'all' | 'get' | 'run', mapResults?: boolean): Promise<U>;\n+getResult(): Promise<T[]>;\n+getResultList(): Promise<T[]>;\n+getSingleResult(): Promise<T | null>;\n+/**\n+ * Returns knex instance with sub-query aliased with given alias.\n+ * You can provide `EntityName.propName` as alias, then the field name will be used based on the metadata\n+ */\n+as(alias: string): KnexQueryBuilder;\n+clone(): QueryBuilder<T>;\n+getKnex(): KnexQueryBuilder;\n+```\n", "query-builder.md": "@@ -0,0 +1,305 @@\n+---\n+title: Using Query Builder\n+---\n+\n+When you need to execute some SQL query without all the ORM stuff involved, you can either\n+compose the query yourself, or use the `QueryBuilder` helper to construct the query for you:\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(Author);\n+qb.update({ name: 'test 123', type: PublisherType.GLOBAL }).where({ id: 123, type: PublisherType.LOCAL });\n+\n+console.log(qb.getQuery());\n+// update `publisher2` set `name` = ?, `type` = ? where `id` = ? 
and `type` = ?\n+\n+console.log(qb.getParams());\n+// ['test 123', PublisherType.GLOBAL, 123, PublisherType.LOCAL]\n+\n+// run the query\n+const res1 = await qb.execute();\n+```\n+\n+`QueryBuilder` also supports [smart query conditions](query-conditions.md).\n+\n+## Using Knex.js\n+\n+Under the hood, `QueryBuilder` uses [`Knex.js`](https://knexjs.org) to compose and run queries.\n+You can access configured `knex` instance via `qb.getKnexQuery()` method:\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(Author);\n+qb.update({ name: 'test 123', type: PublisherType.GLOBAL }).where({ id: 123, type: PublisherType.LOCAL });\n+const knex = qb.getKnexQuery(); // instance of Knex' QueryBuilder\n+\n+// do what ever you need with `knex`\n+\n+const res = await orm.em.getConnection().execute(knex);\n+const entities = res.map(a => orm.em.map(Author, a));\n+console.log(entities); // Author[]\n+```\n+\n+You can also get clear and configured knex instance from the connection via `getKnex()` method.\n+As this method is not available on the base `Connection` class, you will need to either manually\n+type cast the connection to `AbstractSqlConnection` (or the actual implementation you are using, \n+e.g. `MySqlConnection`), or provide correct driver type hint to your `EntityManager` instance, \n+which will be then automatically inferred in `em.getConnection()` method.\n+\n+> Driver and connection implementations are not directly exported from `@mikro-orm/core` module. \n+> You can import them from the driver packages (e.g. `import { PostgreSqlDriver } from '@mikro-orm/postgresql'`).\n+\n+```typescript\n+const conn = orm.em.getConnection() as AbstractSqlConnection;\n+// you can make sure the `em` is correctly typed to `EntityManager<AbstractSqlDriver>`\n+// or one of its implementations:\n+// const em: EntityManager<AbstractSqlDriver> = orm.em;\n+\n+const knex = conn.getKnex();\n+\n+// do what ever you need with `knex`\n+\n+const res = await knex;\n+```\n+\n+## Running Native SQL Query\n+\n+You can run native SQL via underlying connection\n+\n+```typescript\n+const connection = orm.em.getConnection();\n+const res = await connection.execute('select 1 as count');\n+console.log(res); // res is array of objects: `[ { count: 1 } ]`\n+```\n+\n+## Executing the Query\n+\n+You can use `execute(method = 'all', mapResults = true)`'s parameters to control form of result:\n+\n+```typescript\n+const res1 = await qb.execute('all'); // returns array of objects, default behavior\n+const res2 = await qb.execute('get'); // returns single object\n+const res3 = await qb.execute('run'); // returns object like `{ affectedRows: number, insertId: number, row: any }`\n+```\n+\n+Second argument can be used to disable mapping of database columns to property names (which \n+is enabled by default). 
In following example, `Book` entity has `createdAt` property defined \n+with implicit underscored field name `created_at`:\n+\n+```typescript\n+const res4 = await orm.em.createQueryBuilder(Book).select('*').execute('get', true);\n+console.log(res4); // `createdAt` will be defined, while `created_at` will be missing\n+const res5 = await orm.em.createQueryBuilder(Book).select('*').execute('get', false);\n+console.log(res5); // `created_at` will be defined, while `createdAt` will be missing\n+```\n+\n+To get entity instances from the QueryBuilder result, you can use `getResult()` and `getSingleResult()`\n+methods:\n+\n+```typescript\n+const book = await orm.em.createQueryBuilder(Book).select('*').where({ id: 1 }).getSingleResult();\n+console.log(book instanceof Book); // true\n+const books = await orm.em.createQueryBuilder(Book).select('*').getResult();\n+console.log(books[0] instanceof Book); // true\n+```\n+\n+> You can also use `qb.getResultList()` which is alias to `qb.getResult()`.\n+\n+## Mapping Raw Results to Entities\n+\n+Another way to create entity from raw results (that are not necessarily mapped to entity properties)\n+is to use `map()` method of `EntityManager`, that is basically a shortcut for mapping results\n+via `IDatabaseDriver.mapResult()` (which converts field names to property names - e.g. `created_at`\n+to `createdAt`) and `merge()` which converts the data to entity instance and makes it managed. \n+\n+This method comes handy when you want to use 3rd party query builders, where the result is not \n+mapped to entity properties automatically:\n+\n+```typescript\n+const results = await knex.select('*').from('users').where(knex.raw('id = ?', [id]));\n+const users = results.map(user => orm.em.map(User, user));\n+\n+// or use EntityRepository.map()\n+const repo = orm.em.getRepository(User);\n+const users = results.map(user => repo.map(user));\n+```\n+\n+## Implicit Joining\n+\n+`QueryBuilder` supports automatic joining based on entity metadata:\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(BookTag, 't');\n+qb.select('*').where({ books: 123 });\n+\n+console.log(qb.getQuery());\n+// select `t`.*, `e1`.`book_tag_id`, `e1`.`book_uuid_pk`\n+// from `book_tag` as `t`\n+// left join `book_to_book_tag` as `e1` ON `t`.`id` = `e1`.`book_tag_id`\n+// where `e1`.`book_uuid_pk` = ?\n+```\n+\n+This also works for multiple levels of nesting:\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(Author);\n+qb.select('*')\n+ .where({ books: { tags: { name: 'Cool' } } })\n+ .orderBy({ books: { tags: { createdBy: QueryOrder.DESC } } });\n+\n+console.log(qb.getQuery());\n+// select `e0`.* \n+// from `author` as `e0` \n+// left join `book2` as `e1` on `e0`.`id` = `e1`.`author_id` \n+// left join `book2_to_book_tag2` as `e3` on `e1`.`uuid_pk` = `e3`.`book2_uuid_pk` \n+// left join `book_tag2` as `e2` on `e3`.`book_tag2_id` = `e2`.`id` \n+// where `e2`.`name` = ? \n+// order by `e1`.`tags` asc\n+```\n+\n+This is currently available only for filtering (`where`) and sorting (`orderBy`), only \n+the root entity will be selected. 
To populate its relationships, you can use [`em.populate()`](nested-populate.md).\n+\n+## Explicit Joining\n+\n+Another way is to manually specify join property via `join()`/`leftJoin()` methods:\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(BookTag, 't');\n+qb.select(['b.uuid', 'b.*', 't.*'], true)\n+ .join('t.books', 'b')\n+ .where({ 'b.title': 'test 123' })\n+ .limit(2, 1);\n+\n+console.log(qb.getQuery());\n+// select distinct `b`.`uuid_pk`, `b`.*, `t`.*, `e1`.`book_tag_id`, `e1`.`book_uuid_pk` from `book_tag` as `t`\n+// join `book_to_book_tag` as `e1` ON `t`.`id` = `e1`.`book_tag_id`\n+// join `book` as `b` ON `e1`.`book_uuid_pk` = `b`.`uuid_pk`\n+// where `b`.`title` = ?\n+// limit ? offset ?\n+```\n+\n+## Complex Where Conditions\n+\n+There are multiple ways to construct complex query conditions. You can either write parts of SQL\n+manually, use `andWhere()`/`orWhere()`, or provide condition object:\n+\n+### Custom SQL in where\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(BookTag, 't');\n+qb.select(['b.*', 't.*'])\n+ .leftJoin('t.books', 'b')\n+ .where('b.title = ? or b.title = ?', ['test 123', 'lol 321'])\n+ .andWhere('1 = 1')\n+ .orWhere('1 = 2')\n+ .limit(2, 1);\n+\n+console.log(qb.getQuery());\n+// select `b`.*, `t`.*, `e1`.`book_tag_id`, `e1`.`book_uuid_pk` from `book_tag` as `t`\n+// left join `book_to_book_tag` as `e1` ON `t`.`id` = `e1`.`book_tag_id`\n+// left join `book` as `b` ON `e1`.`book_uuid_pk` = `b`.`uuid_pk`\n+// where (((b.title = ? or b.title = ?) and (1 = 1)) or (1 = 2))\n+// limit ? offset ?\n+```\n+\n+### andWhere() and orWhere()\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(BookTag, 't');\n+qb.select(['b.*', 't.*'])\n+ .leftJoin('t.books', 'b')\n+ .where('b.title = ? or b.title = ?', ['test 123', 'lol 321'])\n+ .andWhere('1 = 1')\n+ .orWhere('1 = 2')\n+ .limit(2, 1);\n+\n+console.log(qb.getQuery());\n+// select `b`.*, `t`.*, `e1`.`book_tag_id`, `e1`.`book_uuid_pk` from `book_tag` as `t`\n+// left join `book_to_book_tag` as `e1` ON `t`.`id` = `e1`.`book_tag_id`\n+// left join `book` as `b` ON `e1`.`book_uuid_pk` = `b`.`uuid_pk`\n+// where (((b.title = ? or b.title = ?) and (1 = 1)) or (1 = 2))\n+// limit ? offset ?\n+```\n+\n+### Conditions Object\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(Test);\n+qb.select('*').where({ $and: [{ id: { $nin: [3, 4] } }, { id: { $gt: 2 } }] });\n+\n+console.log(qb.getQuery());\n+// select `e0`.* from `test` as `e0` where (`e0`.`id` not in (?, ?) and `e0`.`id` > ?)\n+```\n+\n+## Using sub-queries\n+\n+You can use sub-queries in selects or in where conditions. 
To select subquery, use\n+`qb.as(alias)` method: \n+\n+> The dynamic property (`booksTotal`) needs to be defined at the entity level (as `persist: false`).\n+\n+```typescript\n+const knex = orm.em.getKnex();\n+const qb1 = orm.em.createQueryBuilder(Book2, 'b').count('b.uuid', true).where({ author: knex.ref('a.id') }).as('Author2.booksTotal');\n+const qb2 = orm.em.createQueryBuilder(Author2, 'a');\n+qb2.select(['*', qb1]).orderBy({ booksTotal: 'desc' });\n+\n+console.log(qb2.getQuery());\n+// select `a`.*, (select count(distinct `b`.`uuid_pk`) as `count` from `book2` as `b` where `b`.`author_id` = `a`.`id`) as `books_total` from `author2` as `a` order by `books_total` desc\n+```\n+\n+```typescript\n+const knex = orm.em.getKnex();\n+const qb3 = orm.em.createQueryBuilder(Book2, 'b').count('b.uuid', true).where({ author: knex.ref('a.id') }).as('books_total');\n+const qb4 = orm.em.createQueryBuilder(Author2, 'a');\n+qb4.select(['*', qb3]).orderBy({ booksTotal: 'desc' });\n+\n+console.log(qb4.getQuery());\n+// select `a`.*, (select count(distinct `b`.`uuid_pk`) as `count` from `book2` as `b` where `b`.`author_id` = `a`.`id`) as `books_total` from `author2` as `a` order by `books_total` desc\n+```\n+\n+When you want to filter by sub-query, you will need to register it first via `qb.withSubquery()`:\n+\n+> The dynamic property (`booksTotal`) needs to be defined at the entity level (as `persist: false`).\n+> You always need to use prefix in the `qb.withSchema()` (so `a.booksTotal`). \n+\n+```typescript\n+const knex = orm.em.getKnex();\n+const qb1 = orm.em.createQueryBuilder(Book2, 'b').count('b.uuid', true).where({ author: knex.ref('a.id') }).getKnexQuery();\n+const qb2 = orm.em.createQueryBuilder(Author2, 'a');\n+qb2.select('*').withSubQuery(qb1, 'a.booksTotal').where({ 'a.booksTotal': { $in: [1, 2, 3] } });\n+\n+console.log(qb2.getQuery());\n+// select `a`.* from `author2` as `a` where (select count(distinct `b`.`uuid_pk`) as `count` from `book2` as `b` where `b`.`author_id` = `a`.`id`) in (?, ?, ?)\n+```\n+\n+```typescript\n+const knex = orm.em.getKnex();\n+const qb3 = orm.em.createQueryBuilder(Book2, 'b').count('b.uuid', true).where({ author: knex.ref('a.id') }).getKnexQuery();\n+const qb4 = orm.em.createQueryBuilder(Author2, 'a');\n+qb4.select('*').withSubQuery(qb3, 'a.booksTotal').where({ 'a.booksTotal': 1 });\n+\n+console.log(qb4.getQuery());\n+// select `a`.* from `author2` as `a` where (select count(distinct `b`.`uuid_pk`) as `count` from `book2` as `b` where `b`.`author_id` = `a`.`id`) = ?\n+```\n+\n+## Referring to column in update queries\n+\n+You can use `qb.raw()` to insert raw SQL snippets like this:\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(Book);\n+qb.update({ price: qb.raw('price + 1') }).where({ uuid: '123' });\n+\n+console.log(qb.getQuery());\n+// update `book` set `price` = price + 1 where `uuid_pk` = ?\n+```\n+\n+## Locking support\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(Test);\n+qb.select('*').where({ name: 'Lol 321' }).setLockMode(LockMode.PESSIMISTIC_READ);\n+\n+console.log(qb.getQuery()); // for MySQL\n+// select `e0`.* from `test` as `e0` where `e0`.`name` = ? 
lock in share mode\n+```\n", "query-conditions.md": "@@ -0,0 +1,102 @@\n+---\n+title: Smart Query Conditions\n+---\n+\n+When you want to make complex queries, you can easily end up with a lot of boilerplate code\n+full of curly brackets:\n+\n+```typescript\n+const res = await orm.em.find(Author, { $and: [\n+ { id: { $in: [1, 2, 7] }, },\n+ { id: { $nin: [3, 4] }, },\n+ { id: { $gt: 5 }, },\n+ { id: { $lt: 10 }, },\n+ { id: { $gte: 7 }, },\n+ { id: { $lte: 8 }, },\n+ { id: { $ne: 9 }, },\n+] });\n+```\n+\n+For AND condition with single field, you can also do this:\n+\n+```typescript\n+const res = await orm.em.find(Author, { \n+ id: { \n+ $in: [1, 2, 7],\n+ $nin: [3, 4],\n+ $gt: 5,\n+ $lt: 10,\n+ $gte: 7,\n+ $lte: 8,\n+ $ne: 9,\n+ },\n+});\n+```\n+\n+Another way to do this by including the operator in your keys:\n+\n+```typescript\n+const res = await orm.em.find(Author, { $and: [\n+ { 'id:in': [1, 2, 7] },\n+ { 'id:nin': [3, 4] },\n+ { 'id:gt': 5 },\n+ { 'id:lt': 10 },\n+ { 'id:gte': 7 },\n+ { 'id:lte': 8 },\n+ { 'id:ne': 9 },\n+] });\n+```\n+\n+For comparison operators, you can also use their mathematical symbols:\n+\n+```typescript\n+const res = await orm.em.find(Author, { $and: [\n+ { 'id >': 5 },\n+ { 'id <': 10 },\n+ { 'id >=': 7 },\n+ { 'id <=': 8 },\n+ { 'id !=': 9 },\n+] });\n+```\n+\n+> Keys with operators like this will cause TypeScript errors as there is no way to support \n+> them on the typings side. They are still supported, but you will need to cast the condition\n+> to `any` to use them. \n+\n+There is also shortcut for `$in` - simply provide array as value and it \n+will be converted automatically:\n+\n+```typescript\n+const res = await orm.em.find(Author, { favouriteBook: [1, 2, 7] });\n+```\n+\n+For primary key lookup, you can provide the array directly to `em.find()`:\n+\n+```typescript\n+const res = await orm.em.find(Author, [1, 2, 7]);\n+```\n+\n+## List of supported operators\n+\n+### Comparison\n+\n+| operator | name | description |\n+|----------|--------------------|-------------|\n+| `$eq`\t | equals | Matches values that are equal to a specified value. |\n+| `$gt`\t | greater | Matches values that are greater than a specified value. |\n+| `$gte` | greater or equal | Matches values that are greater than or equal to a specified value. |\n+| `$in`\t | contains | Matches any of the values specified in an array. |\n+| `$lt`\t | lower | Matches values that are less than a specified value. |\n+| `$lte` | lower or equal | Matches values that are less than or equal to a specified value. |\n+| `$ne`\t | not equal | Matches all values that are not equal to a specified value. |\n+| `$nin` | not contains | Matches none of the values specified in an array. |\n+| `$like` | like | Uses LIKE operator |\n+| `$re` | regexp | Uses REGEXP operator |\n+\n+### Logical\n+\n+| operator | description |\n+|----------|-------------|\n+| `$and` | Joins query clauses with a logical AND returns all documents that match the conditions of both clauses. |\n+| `$not` | Inverts the effect of a query expression and returns documents that do not match the query expression. |\n+| `$or` | Joins query clauses with a logical OR returns all documents that match the conditions of either clause. 
|\n", "quick-start.md": "@@ -0,0 +1,192 @@\n+---\n+title: Quick Start\n+---\n+\n+First install the module via `yarn` or `npm` and do not forget to install the \n+database driver as well:\n+\n+```sh\n+yarn add @mikro-orm/core @mikro-orm/mongodb # for mongo\n+yarn add @mikro-orm/core @mikro-orm/mysql # for mysql/mariadb\n+yarn add @mikro-orm/core @mikro-orm/mariadb # for mysql/mariadb\n+yarn add @mikro-orm/core @mikro-orm/postgresql # for postgresql\n+yarn add @mikro-orm/core @mikro-orm/sqlite # for sqlite\n+```\n+\n+or\n+\n+```sh\n+npm i -s @mikro-orm/core @mikro-orm/mongodb # for mongo\n+npm i -s @mikro-orm/core @mikro-orm/mysql # for mysql/mariadb\n+npm i -s @mikro-orm/core @mikro-orm/mariadb # for mysql/mariadb\n+npm i -s @mikro-orm/core @mikro-orm/postgresql # for postgresql\n+npm i -s @mikro-orm/core @mikro-orm/sqlite # for sqlite\n+```\n+\n+Next you will need to enable support for [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html)\n+as well as `esModuleInterop` in `tsconfig.json` via:\n+\n+```json\n+\"experimentalDecorators\": true,\n+\"emitDecoratorMetadata\": true,\n+\"esModuleInterop\": true\n+```\n+\n+Then call `MikroORM.init` as part of bootstrapping your app:\n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: ['./dist/entities'], // path to your JS entities (dist), relative to `baseDir`\n+ dbName: 'my-db-name',\n+ type: 'mongo',\n+ clientUrl: '...', // defaults to 'mongodb://localhost:27017' for mongodb driver\n+});\n+console.log(orm.em); // access EntityManager via `em` property\n+```\n+\n+There are more ways to configure your entities, take a look at \n+[installation page](https://mikro-orm.io/installation/).\n+\n+> Read more about all the possible configuration options in [Advanced Configuration](https://mikro-orm.io/docs/configuration) section.\n+\n+Then you will need to fork entity manager for each request so their \n+[identity maps](https://mikro-orm.io/identity-map/) will not collide. \n+To do so, use the `RequestContext` helper:\n+\n+```typescript\n+const app = express();\n+\n+app.use((req, res, next) => {\n+ RequestContext.create(orm.em, next);\n+});\n+```\n+\n+> You should register this middleware as the last one just before request handlers and before\n+> any of your custom middleware that is using the ORM. There might be issues when you register \n+> it before request processing middleware like `queryParser` or `bodyParser`, so definitely \n+> register the context after them. \n+\n+More info about `RequestContext` is described [here](https://mikro-orm.io/identity-map/#request-context).\n+\n+Now you can start defining your entities (in one of the `entities` folders). 
This is how\n+simple entity can look like in mongo driver:\n+\n+```typescript title=\"./entities/MongoBook.ts\"\n+@Entity()\n+export class MongoBook {\n+\n+ @PrimaryKey()\n+ _id: ObjectID;\n+\n+ @SerializedPrimaryKey()\n+ id: string;\n+\n+ @Property()\n+ title: string;\n+\n+ @ManyToOne()\n+ author: Author;\n+\n+ @ManyToMany()\n+ tags = new Collection<BookTag>(this);\n+\n+ constructor(title: string, author: Author) {\n+ this.title = title;\n+ this.author = author;\n+ }\n+\n+}\n+```\n+\n+For SQL drivers, you can use `id: number` PK:\n+\n+```typescript title=\"./entities/SqlBook.ts\"\n+@Entity()\n+export class SqlBook {\n+\n+ @PrimaryKey()\n+ id: number;\n+\n+}\n+```\n+\n+Or if you want to use UUID primary keys:\n+\n+```typescript title=\"./entities/UuidBook.ts\"\n+import { v4 } from 'uuid';\n+\n+@Entity()\n+export class UuidBook {\n+\n+ @PrimaryKey()\n+ uuid = v4();\n+\n+}\n+```\n+\n+More information can be found in\n+[defining entities section](https://mikro-orm.io/defining-entities/) in docs.\n+\n+When you have your entities defined, you can start using ORM either via `EntityManager`\n+or via `EntityRepository`s.\n+\n+To save entity state to database, you need to persist it. Persist takes care or deciding \n+whether to use `insert` or `update` and computes appropriate change-set. Entity references\n+that are not persisted yet (does not have identifier) will be cascade persisted automatically. \n+\n+```typescript\n+// use constructors in your entities for required parameters\n+const author = new Author('Jon Snow', '[email protected]');\n+author.born = new Date();\n+\n+const publisher = new Publisher('7K publisher');\n+\n+const book1 = new Book('My Life on The Wall, part 1', author);\n+book1.publisher = publisher;\n+const book2 = new Book('My Life on The Wall, part 2', author);\n+book2.publisher = publisher;\n+const book3 = new Book('My Life on The Wall, part 3', author);\n+book3.publisher = publisher;\n+\n+// just persist books, author and publisher will be automatically cascade persisted\n+await orm.em.persistAndFlush([book1, book2, book3]);\n+```\n+\n+To fetch entities from database you can use `find()` and `findOne()` of `EntityManager`: \n+\n+```typescript\n+const authors = orm.em.find(Author, {});\n+\n+for (const author of authors) {\n+ console.log(author); // instance of Author entity\n+ console.log(author.name); // Jon Snow\n+\n+ for (const book of author.books) { // iterating books collection\n+ console.log(book); // instance of Book entity\n+ console.log(book.title); // My Life on The Wall, part 1/2/3\n+ }\n+}\n+```\n+\n+More convenient way of fetching entities from database is by using `EntityRepository`, that\n+carries the entity name so you do not have to pass it to every `find` and `findOne` calls:\n+\n+```typescript\n+const booksRepository = orm.em.getRepository(Book);\n+\n+// with sorting, limit and offset parameters, populating author references\n+const books = await booksRepository.find({ author: '...' }, ['author'], { title: QueryOrder.DESC }, 2, 1);\n+\n+// or with options object\n+const books = await booksRepository.find({ author: '...' 
}, { \n+ populate: ['author'],\n+ limit: 1,\n+ offset: 2,\n+ sort: { title: QueryOrder.DESC },\n+});\n+\n+console.log(books); // Book[]\n+```\n+\n+Take a look at docs about [working with `EntityManager`](https://mikro-orm.io/entity-manager/)\n+or [using `EntityRepository` instead](https://mikro-orm.io/repositories/).\n", "read-connections.md": "@@ -0,0 +1,43 @@\n+---\n+title: Read Replica Connections\n+---\n+\n+Users can specify multiple read connections via `replicas` option. You can provide only fields \n+that differ from master connection, rest will be taken from it.\n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: [Author, ...],\n+ dbName: `my_database`,\n+ type: 'mysql',\n+ user: 'master_user',\n+ host: 'master_host',\n+ replicas: [\n+ { name: 'read-1', host: 'read_host_1', user: 'read_user' },\n+ { name: 'read-2', host: 'read_host_2' }, // user omitted, will be taken from master connection\n+ ],\n+});\n+```\n+\n+By default select queries will use random read connection if not inside transaction. You can \n+specify connection type manually in `em.getConnection(type: 'read' | 'write')`.\n+\n+```typescript\n+const connection = orm.em.getConnection(); // write connection\n+const readConnection = orm.em.getConnection('read'); // random read connection\n+\n+const qb1 = orm.em.createQueryBuilder(Author);\n+const res1 = await qb1.select('*').execute(); // random read connection\n+\n+const qb2 = orm.em.createQueryBuilder(Author, 'a', 'write');\n+const res2 = await qb2.select('*').execute(); // write connection\n+\n+const qb3 = orm.em.createQueryBuilder(Author);\n+const res3 = await qb3.update(...).where(...).execute(); // write connection\n+\n+// all queries inside a transaction will use write connection\n+await orm.em.transactional(async em => {\n+ const a = await em.findOne(Author, 1); // write connection\n+ a.name = 'test'; // will trigger update on write connection once flushed\n+});\n+```\n", "relationships.md": "@@ -0,0 +1,172 @@\n+---\n+title: Modeling Entity Relationships\n+sidebar_label: Modeling Entity Relationships\n+---\n+\n+There are 4 types of entity relationships in MikroORM: \n+\n+- ManyToOne\n+- OneToMany\n+- OneToOne\n+- ManyToMany\n+\n+Relations can be unidirectional and bidirectional. Unidirectional are defined only on one \n+side (the owning side). Bidirectional are defined on both sides, while one is owning side \n+(where references are store), marked by `inversedBy` attribute pointing to the inverse side.\n+On the inversed side we define it with `mappedBy` attribute pointing back to the owner:\n+\n+> When modeling bidirectional relationship, you can also omit the `inversedBy` attribute, \n+> defining `mappedBy` on the inverse side is enough as it will be auto-wired. 
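\n+\n+A minimal sketch of such an auto-wired pair: only the inverse side declares the wiring via the `mappedBy` callback, while the owning side keeps a plain decorator:\n+\n+```typescript\n+import { Collection, Entity, ManyToOne, OneToMany } from '@mikro-orm/core';\n+\n+@Entity()\n+export class Author {\n+\n+ // inverse side, wired via the `mappedBy` callback\n+ @OneToMany(() => Book, book => book.author)\n+ books = new Collection<Book>(this);\n+\n+}\n+\n+@Entity()\n+export class Book {\n+\n+ // owning side, no `inversedBy` needed, it gets auto-wired\n+ @ManyToOne()\n+ author!: Author;\n+\n+}\n+```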
\n+\n+## ManyToOne\n+\n+> Many instances of the current Entity refer to One instance of the referred Entity.\n+\n+There are multiple ways how to define the relationship, all of following is equivalent:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @ManyToOne() // plain decorator is enough, type will be sniffer via reflection!\n+ author1!: Author;\n+\n+ @ManyToOne(() => Author) // you can specify type manually as a callback\n+ author2!: Author;\n+\n+ @ManyToOne('Author') // or as a string\n+ author3!: Author;\n+\n+ @ManyToOne({ entity: () => Author }) // or use options object\n+ author4!: Author;\n+\n+}\n+```\n+\n+You can also specify how operations on given entity should [cascade](cascading.md) \n+to the referred entity.\n+\n+## OneToMany\n+\n+> One instance of the current Entity has Many instances (references) to the referred Entity.\n+\n+Again, all of following is equivalent:\n+\n+```typescript\n+@Entity()\n+export class Author {\n+\n+ @OneToMany(() => Book, book => book.author)\n+ books1 = new Collection<Book>(this);\n+\n+ @OneToMany('Book', 'author')\n+ books2 = new Collection<Book>(this);\n+\n+ @OneToMany({ mappedBy: book => book.author }) // referenced entity type can be sniffer too\n+ books3 = new Collection<Book>(this);\n+\n+ @OneToMany({ entity: () => Book, mappedBy: 'author', orphanRemoval: true })\n+ books4 = new Collection<Book>(this);\n+\n+}\n+```\n+\n+As you can see, OneToMany is the inverse side of ManyToOne (which is the owning side).\n+More about how collections work can be found on [collections page](collections.md). \n+\n+You can also specify how operations on given entity should [cascade](cascading.md) to the referred\n+entities. There is also more aggressive remove mode called [Orphan Removal](cascading.md#orphan-removal) \n+(`books4` example).\n+\n+## OneToOne\n+\n+> One instance of the current Entity refers to One instance of the referred Entity.\n+\n+This is a variant of ManyToOne, where there is always just one entity on both sides. This means\n+that the foreign key column is also unique.\n+\n+### Owning Side\n+\n+```typescript\n+@Entity()\n+export class User {\n+\n+ // when none of `owner/inverseBy/mappedBy` is provided, it will be considered owning side\n+ @OneToOne()\n+ bestFriend1!: User;\n+\n+ // side with `inversedBy` is the owning one, to define inverse side use `mappedBy`\n+ @OneToOne({ inversedBy: 'bestFriend1', orphanRemoval: true })\n+ bestFriend2!: User;\n+\n+ // when defining it like this, you need to specifically mark the owning side with `owner: true`\n+ @OneToOne(() => User, user => user.bestFriend2, { owner: true, orphanRemoval: true })\n+ bestFriend3!: User;\n+\n+}\n+```\n+\n+### Inverse Side\n+\n+```typescript\n+@Entity()\n+export class User {\n+\n+ @OneToOne({ mappedBy: 'bestFriend1' })\n+ bestFriend1!: User;\n+\n+ @OneToOne(() => User, user => user.bestFriend2)\n+ bestFriend2!: User;\n+\n+}\n+```\n+\n+As you can see, relationships can be also self-referencing (all of them. OneToOne also supports \n+[Orphan Removal](cascading.md#orphan-removal). 
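\n+\n+As a sketch, a self-referencing tree could be modeled like this (the `Category` entity is hypothetical and only serves as an illustration):\n+\n+```typescript\n+import { Collection, Entity, ManyToOne, OneToMany, PrimaryKey } from '@mikro-orm/core';\n+\n+@Entity()\n+export class Category {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ // self-referencing M:1, root categories keep it empty\n+ @ManyToOne(() => Category, { nullable: true })\n+ parent?: Category;\n+\n+ // self-referencing 1:M inverse side\n+ @OneToMany(() => Category, category => category.parent)\n+ children = new Collection<Category>(this);\n+\n+}\n+```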
\n+\n+## ManyToMany\n+\n+> Many instances of the current Entity refers to Many instances of the referred Entity.\n+\n+Here are examples of how you can define ManyToMany relationship:\n+\n+### Owning Side\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ // when none of `owner/inverseBy/mappedBy` is provided, it will be considered owning side\n+ @ManyToMany()\n+ tags1 = new Collection<BookTag>(this);\n+\n+ @ManyToMany(() => BookTag, 'books', { owner: true })\n+ tags2 = new Collection<BookTag>(this);\n+\n+ @ManyToMany(() => BookTag, 'books', { owner: true })\n+ tags3 = new Collection<BookTag>(this);\n+\n+ @ManyToMany(() => BookTag, 'books', { owner: true })\n+ tags4 = new Collection<BookTag>(this);\n+\n+ // to define uni-directional many to many, simply provide only \n+ @ManyToMany(() => Author)\n+ friends: Collection<Author> = new Collection<Author>(this);\n+\n+}\n+```\n+\n+### Inverse Side\n+\n+```typescript\n+@Entity()\n+export class BookTag {\n+\n+ // inverse side has to point to the owning side via `mappedBy` attribute/parameter\n+ @ManyToMany(() => Book, book => book.tags)\n+ books = new Collection<Book>(this);\n+\n+}\n+```\n+\n+Again, more information about how collections work can be found on [collections page](collections.md). \n", "repositories-api.md": "@@ -0,0 +1,139 @@\n+---\n+title: EntityRepository API\n+---\n+\n+#### `find(where: FilterQuery<T>, options?: FindOptions): Promise<T[]>`\n+\n+Returns array of entities found for given condition. You can specify `FindOptions` to request\n+population of referenced entities or control the pagination:\n+\n+```typescript\n+export interface FindOptions {\n+ populate?: string[];\n+ orderBy?: { [k: string]: QueryOrder };\n+ limit?: number;\n+ offset?: number;\n+ schema?: string;\n+}\n+```\n+\n+---\n+\n+#### `find(where: FilterQuery<T>, populate?: string[], orderBy?: { [k: string]: QueryOrder }, limit?: number, offset?: number): Promise<T[]>`\n+\n+Same as previous `find` method, just with dedicated parameters for `populate`, `orderBy`, `limit`\n+and `offset`.\n+\n+---\n+\n+#### `findAndCount(where: FilterQuery<T>, populate?: string[], orderBy?: { [k: string]: QueryOrder }, limit?: number, offset?: number): Promise<T[]>`\n+\n+Combination of `find` and `count` methods. \n+\n+---\n+\n+#### `findAll(options?: FindOptions): Promise<T[]>`\n+\n+Returns all entities for given type. \n+\n+---\n+\n+#### `findAll(populate?: string[], orderBy?: { [k: string]: QueryOrder }, limit?: number, offset?: number): Promise<T[]>`\n+\n+Same as previous `findAll` method, just with dedicated parameters for `populate`, `orderBy`, `limit`\n+and `offset`.\n+\n+---\n+\n+#### `findOne(where: FilterQuery<T> | string, populate?: string[]): Promise<T | null>`\n+\n+Finds an entity by given `where` condition. You can use primary key as `where` value, then\n+if the entity is already managed, no database call will be made. \n+\n+---\n+\n+#### `findOneOrFail(where: FilterQuery<T> | string, populate?: string[]): Promise<T>`\n+\n+Just like `findOne`, but throws when entity not found, so it always resolves to given entity. \n+You can customize the error either globally via `findOneOrFailHandler` option, or locally via \n+`failHandler` option in `findOneOrFail` call.\n+\n+---\n+\n+#### `merge(data: EntityData<T>): T`\n+\n+Adds given entity to current Identity Map. After merging, entity becomes managed. \n+This is useful when you want to work with cached entities. 
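\n+\n+A short sketch (the cached payload shape is illustrative):\n+\n+```typescript\n+// plain data coming e.g. from an external cache\n+const data = { id: 1, name: 'Jon Snow', email: '[email protected]' };\n+\n+const repo = orm.em.getRepository(Author);\n+const author = repo.merge(data); // `author` is now managed by the identity map\n+```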
\n+\n+---\n+\n+#### `getReference(id: string): T`\n+\n+Gets a reference to the entity identified by the given type and identifier without actually \n+loading it, if the entity is not yet loaded.\n+\n+---\n+\n+#### `count(where?: FilterQuery<T>): Promise<number>`\n+\n+Gets count of entities matching the `where` condition. \n+\n+---\n+\n+#### `persist(entity: AnyEntity | AnyEntity[]): Promise<void>`\n+\n+Tells the EntityManager to make an instance managed and persistent. The entity will be \n+entered into the database at or before transaction commit or as a result of the flush \n+operation. \n+\n+---\n+\n+#### `persistAndFlush(entity: AnyEntity | AnyEntity[]): Promise<void>`\n+\n+Shortcut for `persist` & `flush`.\n+\n+---\n+\n+#### `persistLater(entity: AnyEntity | AnyEntity[]): void`\n+\n+Shortcut for just `persist`, without flushing. Deprecated, use `em.persist()`.\n+\n+---\n+\n+#### `flush(): Promise<void>`\n+\n+Flushes all changes to objects that have been queued up to now to the database.\n+\n+---\n+\n+#### `remove(where: AnyEntity | Reference<AnyEntity> | (AnyEntity | Reference<AnyEntity>)[]): Promise<void>`\n+\n+When provided entity instance as `where` value, then it calls `removeEntity(entity, flush)`, \n+otherwise it fires delete query with given `where` condition. \n+\n+This method fires `beforeDelete` and `afterDelete` hooks only if you provide entity instance. \n+\n+---\n+\n+#### `removeAndFlush(entity: AnyEntity): Promise<void>`\n+\n+Shortcut for `remove` & `flush`.\n+\n+This method fires `beforeDelete` and `afterDelete` hooks. \n+\n+---\n+\n+#### `removeLater(entity: AnyEntity): void`\n+\n+Shortcut for `remove` without flushing. Deprecated, use `em.remove()`.\n+\n+This method fires `beforeDelete` and `afterDelete` hooks. \n+\n+---\n+\n+#### `canPopulate(property: string): boolean`\n+\n+Returns whether given entity has given property which can be populated (is reference or\n+collection).\n+\n+---\n", "repositories.md": "@@ -0,0 +1,83 @@\n+---\n+title: Using EntityRepository instead of EntityManager\n+sidebar_label: Entity Repository\n+---\n+\n+More convenient way of fetching entities from database is by using `EntityRepository`, that\n+carries the entity name so you do not have to pass it to every `find` and `findOne` calls:\n+\n+Example:\n+\n+```typescript\n+const booksRepository = orm.em.getRepository(Book);\n+\n+// with sorting, limit and offset parameters, populating author references\n+const books = await booksRepository.find({ author: '...' }, ['author'], { title: QueryOrder.DESC }, 2, 1);\n+\n+// or with options object\n+const books = await booksRepository.find({ author: '...' }, { \n+ populate: ['author'],\n+ limit: 1,\n+ offset: 2,\n+ sort: { title: QueryOrder.DESC },\n+});\n+\n+console.log(books); // Book[]\n+```\n+\n+## Custom Repository\n+\n+To use custom repository, just extend `EntityRepository<T>` class:\n+\n+```typescript\n+@Repository(Author)\n+export class CustomAuthorRepository extends EntityRepository<Author> {\n+\n+ // your custom methods...\n+ public findAndUpdate(...) 
{\n+ // ...\n+ }\n+\n+}\n+```\n+\n+You can also omit the `@Repository` decorator and register your repository in `@Entity` \n+decorator instead:\n+\n+```typescript\n+@Entity({ customRepository: () => CustomAuthorRepository })\n+export class Author {\n+ // ...\n+}\n+```\n+\n+Note that we need to pass that repository reference inside a callback so we will not run\n+into circular dependency issues when using entity references inside that repository.\n+\n+Now you can access your custom repository via `em.getRepository()` method.\n+\n+### Inferring custom repository type\n+\n+To have the `em.getRepository()` method return correctly typed custom repository\n+instead of the generic `EntityRepository<T>`, we can use `EntityRepositoryType`\n+symbol:\n+\n+```ts\n+@Entity({ customRepository: () => AuthorRepository })\n+export class Author {\n+\n+ [EntityRepositoryType]?: AuthorRepository;\n+\n+}\n+\n+const repo = em.getRepository(Author); // repo has type AuthorRepository\n+```\n+\n+> You can also register custom base repository (for all entities where you do not specify \n+> `customRepository`) globally, via `MikroORM.init({ entityRepository: CustomBaseRepository })`.\n+\n+> Note that you cannot use both `@Repository(Author)` on the repository and `{ customRepository: () => AuthorRepository }` on the entity at the same time. This will cause a circular dependency and throws an error. Either one of options achieves the same goal.\n+\n+For more examples, take a look at\n+[`tests/EntityManager.mongo.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts)\n+or [`tests/EntityManager.mysql.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/EntityManager.mongo.test.ts).\n", "schema-generator.md": "@@ -0,0 +1,92 @@\n+---\n+title: Schema Generator\n+---\n+\n+> SchemaGenerator can do harm to your database. It will drop or alter tables, indexes, \n+> sequences and such. Please use this tool with caution in development and not on a \n+> production server. It is meant for helping you develop your Database Schema, but NOT \n+> with migrating schema from A to B in production. A safe approach would be generating \n+> the SQL on development server and saving it into SQL Migration files that are executed \n+> manually on the production server.\n+\n+> SchemaTool assumes your project uses the given database on its own. Update and Drop \n+> commands will mess with other tables if they are not related to the current project \n+> that is using MikroORM. Please be careful!\n+\n+To generate schema from your entity metadata, you can use `SchemaGenerator` helper. \n+\n+You can use it via CLI: \n+\n+```sh\n+npx mikro-orm schema:create --dump # Dumps create schema SQL\n+npx mikro-orm schema:update --dump # Dumps update schema SQL\n+npx mikro-orm schema:drop --dump # Dumps drop schema SQL\n+```\n+\n+> You can also use `--run` flag to fire all queries, but be careful as it might break your\n+> database. Be sure to always check the generated SQL first before executing. Do not use\n+> `--run` flag in production! \n+\n+`schema:create` will automatically create the database if it does not exist. \n+\n+`schema:update` drops all unknown tables by default, you can use `--no-drop-tables` \n+to get around it. There is also `--safe` flag that will disable both table dropping as \n+well as column dropping. \n+\n+`schema:drop` will by default drop all database tables. You can use `--drop-db` flag to drop\n+the whole database instead. 
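\n+\n+As a sketch of the safer workflow mentioned in the warning above, you can redirect the dump into a file that you review and later execute manually on the production server (the file name is just an example):\n+\n+```sh\n+# dump the update SQL and save it for manual review/execution\n+npx mikro-orm schema:update --dump > migrations/2020-01-01-update.sql\n+```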
\n+\n+## Using SchemaGenerator programmatically\n+\n+Or you can create simple script where you initialize MikroORM like this:\n+\n+```typescript title=\"./create-schema.ts\"\n+import { MikroORM } from '@mikro-orm/core';\n+\n+(async () => {\n+ const orm = await MikroORM.init({\n+ entities: [Author, Book, ...],\n+ dbName: 'your-db-name',\n+ // ...\n+ });\n+ const generator = orm.getSchemaGenerator();\n+\n+ const dropDump = await generator.getDropSchemaSQL();\n+ console.log(dropDump);\n+\n+ const createDump = await generator.getCreateSchemaSQL();\n+ console.log(createDump);\n+\n+ const updateDump = await generator.getUpdateSchemaSQL();\n+ console.log(updateDump);\n+\n+ // there is also `generate()` method that returns drop + create queries\n+ const dropAndCreateDump = await generator.generate();\n+ console.log(dropAndCreateDump);\n+\n+ // or you can run those queries directly, but be sure to check them first!\n+ await generator.dropSchema();\n+ await generator.createSchema();\n+ await generator.updateSchema();\n+\n+ await orm.close(true);\n+})();\n+```\n+\n+Then run this script via `ts-node` (or compile it to plain JS and use `node`):\n+\n+```sh\n+$ ts-node create-schema\n+```\n+\n+## Limitations of SQLite\n+\n+There are limitations of SQLite database because of which it behaves differently \n+than other SQL drivers. Namely, it is not possible to:\n+\n+- create foreign key constraints when altering columns\n+- create empty tables without columns\n+- alter column requires nullability\n+\n+Because of this, you can end up with different schema with SQLite, so it is not\n+suggested to use SQLite for integration tests of your application.\n", "serializing.md": "@@ -0,0 +1,102 @@\n+---\n+title: Serializing\n+---\n+\n+By default, all entities are monkey patched with `toObject()` and `toJSON` methods:\n+\n+```typescript\n+export interface AnyEntity<K = number | string> {\n+ toObject(parent?: AnyEntity, isCollection?: boolean): Record<string, any>;\n+ toJSON(...args: any[]): Record<string, any>;\n+ // ...\n+}\n+```\n+\n+When you serialize your entity via `JSON.stringify(entity)`, its `toJSON` method will be \n+called automatically. You can provide custom implementation for `toJSON`, while using \n+`toObject` for initial serialization:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ // ...\n+\n+ toJSON(strict = true, strip = ['id', 'email'], ...args: any[]): { [p: string]: any } {\n+ const o = this.toObject(...args); // do not forget to pass rest params here\n+\n+ if (strict) {\n+ strip.forEach(k => delete o[k]);\n+ }\n+\n+ return o;\n+ }\n+\n+}\n+```\n+\n+> Do not forget to pass rest params when calling `toObject(...args)`, otherwise the results\n+> might not be stable.\n+\n+## Hidden Properties\n+\n+If you want to omit some properties from serialized result, you can mark them with `hidden`\n+flag on `@Property()` decorator:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @Property({ hidden: true })\n+ hiddenField = Date.now();\n+\n+}\n+\n+const book = new Book(...);\n+console.log(book.toObject().hiddenField); // undefined\n+console.log(book.toJSON().hiddenField); // undefined\n+```\n+\n+## Shadow Properties\n+\n+The opposite situation where you want to define a property that lives only in memory (is \n+not persisted into database) can be solved by defining your property as `persist: false`. \n+Such property can be assigned via one of `Entity.assign()`, `em.create()` and \n+`em.merge()`. It will be also part of serialized result. 
\n+\n+This can be handle when dealing with additional values selected via `QueryBuilder` or \n+MongoDB's aggregations.\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @Property({ persist: false })\n+ count?: number;\n+\n+}\n+\n+const book = new Book(...);\n+book.assign({ count: 123 });\n+console.log(book.toObject().count); // 123\n+console.log(book.toJSON().count); // 123\n+```\n+\n+## Property Serializers\n+\n+As an alternative to custom `toJSON()` method, we can also use property serializers.\n+They allow to specify a callback that will be used when serializing a property:\n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @ManyToOne({ serializer: value => value.name, serializedName: 'authorName' })\n+ author: Author;\n+\n+}\n+\n+const author = new Author('God')\n+const book = new Book(author);\n+console.log(book.toJSON().authorName); // 'God'\n+```\n", "transactions.md": "@@ -0,0 +1,296 @@\n+---\n+title: Transactions and Concurrency\n+---\n+\n+> Starting v3.4, transactions are also supported in [MongoDB driver](usage-with-mongo.md).\n+\n+## Transaction Demarcation\n+\n+Transaction demarcation is the task of defining your transaction boundaries. Proper \n+transaction demarcation is very important because if not done properly it can negatively \n+affect the performance of your application. Many databases and database abstraction \n+layers by default operate in auto-commit mode, which means that every single SQL statement \n+is wrapped in a small transaction. Without any explicit transaction demarcation from your \n+side, this quickly results in poor performance because transactions are not cheap. \n+\n+For the most part, MikroORM already takes care of proper transaction demarcation for you: \n+All the write operations (INSERT/UPDATE/DELETE) are queued until `em.flush()` \n+is invoked which wraps all of these changes in a single transaction.\n+\n+However, MikroORM also allows (and encourages) you to take over and control transaction \n+demarcation yourself.\n+\n+These are two ways to deal with transactions when using the MikroORM and are now described \n+in more detail.\n+\n+### Approach 1: Implicitly\n+\n+The first approach is to use the implicit transaction handling provided by the MikroORM \n+`EntityManager`. Given the following code snippet, without any explicit transaction \n+demarcation:\n+\n+```typescript\n+const user = new User(...);\n+user.name = 'George';\n+await orm.em.persistAndFlush(user);\n+```\n+\n+Since we do not do any custom transaction demarcation in the above code, `em.flush()` \n+will begin and commit/rollback a transaction. This behavior is made possible by the \n+aggregation of the DML operations by the MikroORM and is sufficient if all the data \n+manipulation that is part of a unit of work happens through the domain model and thus \n+the ORM.\n+\n+### Approach 2: Explicitly\n+\n+The explicit alternative is to use the transactions API directly to control the boundaries. \n+The code then looks like this:\n+\n+```typescript\n+await orm.em.transactional(em => {\n+ //... do some work\n+ const user = new User(...);\n+ user.name = 'George';\n+ em.persist(user);\n+});\n+```\n+\n+Or you can use `begin/commit/rollback` methods explicitly. Following example is\n+equivalent to the previous one:\n+\n+```typescript\n+const em = orm.em.fork(false);\n+await em.begin();\n+\n+try {\n+ //... 
do some work\n+ const user = new User(...);\n+ user.name = 'George';\n+ em.persist(user);\n+ await em.commit(); // will flush before making the actual commit query\n+} catch (e) {\n+ await em.rollback();\n+ throw e;\n+}\n+```\n+\n+Explicit transaction demarcation is required when you want to include custom DBAL operations \n+in a unit of work or when you want to make use of some methods of the EntityManager API \n+that require an active transaction. Such methods will throw a `ValidationError` to inform \n+you of that requirement.\n+\n+`em.transactional(cb)` will flush the inner `EntityManager` prior to transaction commit.\n+\n+### Exception Handling\n+\n+When using implicit transaction demarcation and an exception occurs during \n+`em.flush()`, the transaction is automatically rolled back.\n+\n+When using explicit transaction demarcation and an exception occurs, the transaction should \n+be rolled back immediately as demonstrated in the example above. This can be handled elegantly \n+by the control abstractions shown earlier. Note that when catching Exception you should \n+generally re-throw the exception. If you intend to recover from some exceptions, catch them \n+explicitly in earlier catch blocks (but do not forget to rollback the transaction). All \n+other best practices of exception handling apply similarly (i.e. either log or re-throw, \n+not both, etc.).\n+\n+As a result of this procedure, all previously managed or removed instances of the `EntityManager` \n+become detached. The state of the detached objects will be the state at the point at which the \n+transaction was rolled back. The state of the objects is in no way rolled back and thus the \n+objects are now out of sync with the database. The application can continue to use the detached \n+objects, knowing that their state is potentially no longer accurate.\n+\n+If you intend to start another unit of work after an exception has occurred you should do \n+that with a new `EntityManager`. Simply use `em.fork()` to obtain fresh copy \n+with cleared identity map. \n+\n+## Locking Support\n+\n+### Why we need concurrency control?\n+\n+If transactions are executed serially (one at a time), no transaction concurrency exists. \n+However, if concurrent transactions with interleaving operations are allowed, you may easily \n+run into one of those problems:\n+\n+1. The lost update problem\n+2. The dirty read problem\n+3. The incorrect summary problem\n+\n+To mitigate those problems, MikroORM offers support for Pessimistic and Optimistic locking \n+strategies natively. This allows you to take very fine-grained control over what kind of \n+locking is required for your entities in your application.\n+\n+### Optimistic Locking\n+\n+Database transactions are fine for concurrency control during a single request. However, a \n+database transaction should not span across requests, the so-called \"user think time\". Therefore \n+a long-running \"business transaction\" that spans multiple requests needs to involve several \n+database transactions. Thus, database transactions alone can no longer control concurrency \n+during such a long-running business transaction. Concurrency control becomes the partial \n+responsibility of the application itself.\n+\n+MikroORM has integrated support for automatic optimistic locking via a version field. 
In \n+this approach any entity that should be protected against concurrent modifications during \n+long-running business transactions gets a version field that is either a simple number \n+(mapping type: integer) or a timestamp (mapping type: datetime). When changes to such an \n+entity are persisted at the end of a long-running conversation the version of the entity \n+is compared to the version in the database and if they don't match, a `ValidationError` \n+is thrown, indicating that the entity has been modified by someone else already.\n+\n+You designate a version field in an entity as follows. In this example we'll use an integer.\n+\n+```typescript\n+export class User {\n+ // ...\n+ @Property({ version: true })\n+ version!: number;\n+ // ...\n+}\n+```\n+\n+Alternatively a datetime type can be used (which maps to a SQL timestamp or datetime):\n+\n+```typescript\n+export class User {\n+ // ...\n+ @Property({ version: true })\n+ version!: Date;\n+ // ...\n+}\n+```\n+\n+Version numbers (not timestamps) should however be preferred as they can not potentially \n+conflict in a highly concurrent environment, unlike timestamps where this is a possibility, \n+depending on the resolution of the timestamp on the particular database platform.\n+\n+When a version conflict is encountered during `em.flush()`, a `ValidationError` \n+is thrown and the active transaction rolled back (or marked for rollback). This exception \n+can be caught and handled. Potential responses to a `ValidationError` are to present the \n+conflict to the user or to refresh or reload objects in a new transaction and then retrying \n+the transaction.\n+\n+The time between showing an update form and actually modifying the entity can in the worst \n+scenario be as long as your applications session timeout. If changes happen to the entity \n+in that time frame you want to know directly when retrieving the entity that you will hit \n+an optimistic locking exception:\n+\n+You can always verify the version of an entity during a request either when calling \n+`em.findOne()`:\n+\n+```typescript\n+const theEntityId = 1;\n+const expectedVersion = 184;\n+\n+try {\n+ const entity = await orm.em.findOne(User, theEntityId, { lockMode: LockMode.OPTIMISTIC, lockVersion: expectedVersion });\n+\n+ // do the work\n+\n+ await orm.em.flush();\n+} catch (e) {\n+ console.log('Sorry, but someone else has already changed this entity. Please apply the changes again!');\n+}\n+```\n+\n+Or you can use `em.lock()` to find out:\n+\n+```typescript\n+const theEntityId = 1;\n+const expectedVersion = 184;\n+const entity = await orm.em.findOne(User, theEntityId);\n+\n+try {\n+ // assert version\n+ await orm.em.lock(entity, LockMode.OPTIMISTIC, expectedVersion);\n+} catch (e) {\n+ console.log('Sorry, but someone else has already changed this entity. Please apply the changes again!');\n+}\n+```\n+\n+#### Important Implementation Notes\n+\n+You can easily get the optimistic locking workflow wrong if you compare the wrong versions. \n+Say you have Alice and Bob editing a hypothetical blog post:\n+\n+- Alice reads the headline of the blog post being \"Foo\", at optimistic lock version 1 (GET Request)\n+- Bob reads the headline of the blog post being \"Foo\", at optimistic lock version 1 (GET Request)\n+- Bob updates the headline to \"Bar\", upgrading the optimistic lock version to 2 (POST Request of a Form)\n+- Alice updates the headline to \"Baz\", ... 
(POST Request of a Form)\n+\n+Now at the last stage of this scenario the blog post has to be read again from the database \n+before Alice's headline can be applied. At this point you will want to check if the blog \n+post is still at version 1 (which it is not in this scenario).\n+\n+Using optimistic locking correctly, you **have** to add the version as an additional hidden \n+field (or into the session for more safety). Otherwise you cannot verify the version is still \n+the one being originally read from the database when Alice performed her GET request for the \n+blog post. If this happens you might see lost updates you wanted to prevent with Optimistic \n+Locking.\n+\n+See the example code (frontend):\n+\n+```typescript\n+const res = await fetch('api.example.com/book/123');\n+const book = res.json();\n+console.log(book.version); // prints the current version\n+\n+// user does some changes and calls the PUT handler\n+const changes = { title: 'new title' };\n+await fetch('api.example.com/book/123', {\n+ method: 'PUT',\n+ body: {\n+ ...changes,\n+ version: book.version,\n+ },\n+});\n+```\n+\n+And the corresponding API endpoints:\n+\n+```typescript\n+// GET /book/:id\n+async findOne(req, res) {\n+ const book = await this.em.findOne(Book, +req.query.id);\n+ res.json(book);\n+}\n+\n+// PUT /book/:id\n+async update(req, res) {\n+ const book = await this.em.findOne(Book, +req.query.id, { lockMode: LockMode.OPTIMISTIC, lockVersion: req.body.version });\n+ wrap(book).assign(req.body);\n+ await this.em.flush();\n+\n+ res.json(book);\n+}\n+```\n+\n+Your frontend app loads an entity from API, the response includes the version property. \n+User makes some changes and fires PUT request back to the API, with version field included \n+in the payload. The PUT handler of the API then reads the version and passes it to the \n+`em.findOne()` call.\n+\n+### Pessimistic Locking\n+\n+MikroORM supports Pessimistic Locking at the database level. No attempt is being made to implement \n+pessimistic locking inside MikroORM, rather vendor-specific and ANSI-SQL commands are used to \n+acquire row-level locks. Every Entity can be part of a pessimistic lock, there is no special \n+metadata required to use this feature.\n+\n+However for Pessimistic Locking to work you have to disable the Auto-Commit Mode of your Database \n+and start a transaction around your pessimistic lock use-case using the \"Approach 2: Explicit \n+Transaction Demarcation\" described above. MikroORM will throw an Exception if you attempt to \n+acquire an pessimistic lock and no transaction is running.\n+\n+MikroORM currently supports two pessimistic lock modes:\n+\n+- Pessimistic Write (`LockMode.PESSIMISTIC_WRITE`), locks the underlying database rows for concurrent Read and Write Operations.\n+- Pessimistic Read (`LockMode.PESSIMISTIC_READ`), locks other concurrent requests that attempt to update or lock rows in write mode.\n+\n+You can use pessimistic locks in three different scenarios:\n+\n+1. Using `em.findOne(className, id, { lockMode: LockMode.PESSIMISTIC_WRITE })` or `em.findOne(className, id, { lockMode: LockMode.PESSIMISTIC_READ })`\n+2. Using `em.lock(entity, LockMode.PESSIMISTIC_WRITE)` or `em.lock(entity, LockMode.PESSIMISTIC_READ)`\n+3. 
Using `QueryBuilder.setLockMode(LockMode.PESSIMISTIC_WRITE)` or `QueryBuilder.setLockMode(LockMode.PESSIMISTIC_READ)`\n+\n+> This part of documentation is highly inspired by [doctrine internals docs](https://www.doctrine-project.org/projects/doctrine-orm/en/latest/reference/transactions-and-concurrency.html)\n+> as the behaviour here is pretty much the same.\n", "unit-of-work.md": "@@ -0,0 +1,104 @@\n+---\n+title: Unit of Work and Transactions\n+sidebar_label: Unit of Work\n+---\n+\n+MikroORM uses the Identity Map pattern to track objects. Whenever you fetch an object from \n+the database, MikroORM will keep a reference to this object inside its `UnitOfWork`. \n+\n+This allows MikroORM room for optimizations. If you call the EntityManager and ask for an \n+entity with a specific ID twice, it will return the same instance:\n+\n+```typescript\n+const authorRepository = orm.em.getRepository(Author);\n+const jon1 = await authorRepository.findOne(1);\n+const jon2 = await authorRepository.findOne(1);\n+\n+// identity map in action\n+console.log(jon1 === jon2); // true\n+```\n+\n+Only one SELECT query will be fired against the database here. In the second `findOne()` \n+call MikroORM will check the identity map first and will skip the database round trip as\n+it will find the entity already loaded.\n+\n+The identity map being indexed by primary keys only allows shortcuts when you ask for objects \n+by primary key. When you query by other properties, you will still get the same reference, \n+but two separate database calls will be made:\n+\n+```typescript\n+const authorRepository = orm.em.getRepository(Author);\n+const jon1 = await authorRepository.findOne({ name: 'Jon Snow' });\n+const jon2 = await authorRepository.findOne({ name: 'Jon Snow' });\n+\n+// identity map in action\n+console.log(jon1 === jon2); // true\n+```\n+\n+MikroORM only knows objects by id, so a query for different criteria has to go to the database, \n+even if it was executed just before. But instead of creating a second `Author` object MikroORM \n+first gets the primary key from the row and checks if it already has an object inside the \n+`UnitOfWork` with that primary key. \n+\n+## Persisting Managed Entities\n+\n+The identity map has a second use-case. When you call `em.flush()`, MikroORM will \n+ask the identity map for all objects that are currently managed. This means you don't have to \n+call `em.persist()` over and over again to pass known objects to the \n+`EntityManager`. This is a NO-OP for known entities, but leads to much code written that is \n+confusing to other developers.\n+\n+The following code WILL update your database with the changes made to the `Author` object, \n+even if you did not call `em.persist()`:\n+\n+```typescript\n+const authorRepository = orm.em.getRepository(Author);\n+const jon = await authorRepository.findOne(1);\n+jon.email = '[email protected]';\n+await authorRepository.flush(); // calling orm.em.flush() has same effect\n+```\n+\n+## How MikroORM Detects Changes\n+\n+MikroORM is a data-mapper that tries to achieve persistence-ignorance (PI). This means you \n+map JS objects into a relational database that do not necessarily know about the database at \n+all. A natural question would now be, \"how does MikroORM even detect objects have changed?\".\n+\n+For this MikroORM keeps a second map inside the `UnitOfWork`. Whenever you fetch an object \n+from the database MikroORM will keep a copy of all the properties and associations inside \n+the `UnitOfWork`. 
\n+\n+Now whenever you call `em.flush()` MikroORM will iterate over all entities you \n+previously marked for persisting via `em.persist()`. For each object it will\n+compare the original property and association values with the values that are currently set \n+on the object. If changes are detected then the object is queued for a UPDATE operation. \n+Only the fields that actually changed are updated.\n+\n+## Implicit Transactions\n+\n+First and most important implication of having Unit of Work is that it allows handling\n+transactions automatically. \n+\n+When you call `em.flush()`, all computed changes are queried inside a database\n+transaction (if supported by given driver). This means that you can control the boundaries \n+of transactions simply by calling `em.persist()` and once all your changes \n+are ready, simply calling `flush()` will run them inside a transaction. \n+\n+> You can also control the transaction boundaries manually via `em.transactional(cb)`.\n+\n+```typescript\n+const user = await em.findOne(User, 1);\n+user.email = '[email protected]';\n+const car = new Car();\n+user.cars.add(car);\n+\n+// thanks to bi-directional cascading we only need to persist user entity\n+// flushing will create a transaction, insert new car and update user with new email\n+await em.persistAndFlush(user);\n+```\n+\n+You can find more information about transactions in [Transactions and concurrency](transactions.md) \n+page.\n+\n+> This part of documentation is highly inspired by [doctrine internals docs](https://www.doctrine-project.org/projects/doctrine-orm/en/2.6/reference/unitofwork.html)\n+> as the behaviour here is pretty much the same.\n", "upgrading-v2-to-v3.md": "@@ -0,0 +1,158 @@\n+---\n+title: Upgrading from v2 to v3\n+---\n+\n+Following sections describe (hopefully) all breaking changes, most of them might be not valid \n+for you, like if you do not use custom `NamingStrategy` implementation, you do not care about\n+the interface being changed.\n+\n+## Default value of autoFlush has changed to false\n+\n+> If you had `autoFlush: false` in your ORM configuration before, you can now remove \n+> this line, no changes are needed in your app. \n+\n+Default value for `autoFlush` is now `false`. That means you need to call \n+`em.flush()` yourself to persist changes into database. You can still change this via ORM's\n+options to ease the transition but generally it is not recommended as it can cause unwanted\n+small transactions being created around each `persist`. \n+\n+```typescript\n+orm.em.persist(new Entity()); // no auto-flushing by default\n+await orm.em.flush();\n+await orm.em.persist(new Entity(), true); // you can still use second parameter to auto-flush\n+```\n+\n+## Reworked entity definition\n+\n+> Implementing those interfaces is optional. \n+\n+Now it is no longer needed to merge entities with `IEntity` interface, that was polluting entity's \n+interface with internal methods. New interfaces `IdentifiedEntity<T>`, `UuidEntity<T>` and `MongoEntity<T>` \n+are introduced, that can be implemented by entities. They are not adding any new properties or methods, \n+keeping the entity's interface clean. This is also the reason why they can be omitted.\n+\n+`IEntity` interface has been renamed to `AnyEntity<T, PK>` and it no longer has public methods \n+like `toJSON()`, `toObject()` or `init()`. One can use `wrap()` method provided by ORM that\n+will enhance property type when needed with those methods (`await wrap(book.author).init()`). 
\n+To keep all methods available on the entity, you can still use interface merging with \n+`WrappedEntity<T, PK>` that both extends `AnyEntity<T, PK>` and defines all those methods.\n+\n+You can mark the entity by implementing one of `*Entity` interfaces:\n+\n+- `IdEntity<T>` for numeric/string PK on `id` property (`id: number`)\n+- `UuidEntity<T>` for string PK on `uuid` property (`uuid: string`)\n+- `MongoEntity<T>` for mongo, where `id: string` and `_id: ObjectId` are required\n+- `AnyEntity<T, PK>` for other possible properties (fill the PK property name to `PK` \n+parameter, e.g.: `AnyEntity<Book, 'myPrimaryProperty'>'`)\n+\n+To keep all public methods that were part of `IEntity` interface in v2, you can use \n+`WrappedEntity<T, PK>` via interface merging:\n+\n+```typescript\n+@Entity()\n+export class Book { ... }\n+export interface Book extends WrappedEntity<Book, 'id'> { }\n+```\n+\n+For more examples, take a look at [defining entities section](defining-entities.md).\n+\n+## Integrated Knex.js as query builder and runner\n+\n+`QueryBuilder` now internally uses knex to run all queries. As knex already supports connection \n+pooling, this feature comes without any effort. New configuration for pooling is now available\n+\n+Transactions now require using `em.transactional()` method, previous helpers \n+`beginTransaction`/`commit`/`rollback` are now removed.\n+\n+All transaction management has been removed from `IDatabaseDriver` interface, now EM handles \n+this, passing the transaction context (carried by EM, and created by `Connection`) to all \n+driver methods. New methods on EM exists: `isInTransaction()` and `getTransactionContext()`.\n+\n+In postgres driver, it used to be required to pass parameters as indexed dollar sign \n+($1, $2, ...), while now knex requires the placeholder to be simple question mark (`?`), \n+like in other dialects, so this is now unified with other drivers.\n+\n+## ManyToMany now uses composite primary key\n+\n+Previously it was required to have autoincrement primary key for m:n pivot tables. Now this \n+has changed. By default, only foreign columns are required and composite key over both of them\n+is used as primary key.\n+\n+To preserve stable order of collections, you can force previous behaviour by defining the \n+m:n property as `fixedOrder: true`, which will start ordering by `id` column. You can also \n+override the order column name via `fixedOrderColumn: 'order'`. \n+\n+You can also specify default ordering via `orderBy: { ... }` attribute.\n+\n+## Entity references now don't have instantiated collections\n+\n+Previously all entity instances, including entity references (not fully loaded entities where\n+we know only the primary key), had instantiated collection classes. Now only initialized entities\n+have them.\n+\n+```typescript\n+const book = em.getReference(Book, 1);\n+console.log(book.tags); // undefined\n+await book.init();\n+console.log(book.tags); // instance of Collection (not initialized)\n+```\n+\n+## EntityAssigner.assign() requires EM for new entities\n+\n+Previously all entities had internal reference to the root EM - the one created when \n+initializing the ORM. Now only managed entities (those merged to the EM, e.g. loaded \n+from the database) have this internal reference. 
\n+\n+To use `assign()` method on new (not managed) entities, you need to provide the `em`\n+parameter:\n+\n+```typescript\n+const book = new Book();\n+wrap(book).assign(data, { em: orm.em });\n+```\n+\n+## Strict FilterQuery and smart query conditions\n+\n+`FilterQuery` now does not allow using smart query operators. You can either cast your condition \n+as any or use object syntax instead (instead of `{ 'age:gte': 18 }` use `{ age: { $gte: 18 } }`).\n+\n+## Logging configuration\n+\n+Previously to start logging it was required to provide your custom logger. Logger now defaults \n+to `console.log()`, and users can specify what namespaces are they interested in via `debug` \n+option. `true`/`false` will enable/disable all namespaces.\n+\n+Available logger namespaces: `'query' | 'query-params' | 'discovery' | 'info'`.\n+\n+## Removed deprecated fk option from 1:m and m:1 decorators \n+\n+Use `mappedBy`/`inversedBy` instead.\n+\n+## SchemaGenerator.generate() is now async\n+\n+If you used `SchemaGenerator`, now there is CLI tool you can use instead. Learn more \n+in [SchemaGenerator docs](schema-generator.md). To setup CLI, take a look at \n+[installation section](installation.md).\n+\n+## New method on NamingStrategy interface\n+\n+`getClassName()` is used to find entity class name based on its file name. Now users can \n+override the default implementation to accommodate their specific needs.\n+\n+If you used custom naming strategy, you will either need to implement this method yourself, \n+or extend `AbstractNamingStrategy`.\n+\n+## TypescriptMetadataProvider has been renamed\n+\n+The name is now `TsMorphMetadataProvider`, there is also newly added `ReflectMetadataProvider`\n+that uses `reflect-metadata` instead. As `TypescriptMetadataProvider` was the default, no \n+changes should be required. \n+\n+## Updated mongodb driver\n+\n+MongoDB driver version 3.3.4 or higher is now required.\n+\n+## EntityManager.find() now requires where parameter\n+`EntityManager` has now same `find` method interface aligned with `EntityRepository`, \n+`where` parameter is now required. To select all entities, use `em.find(Entity, {})` \n+as value.\n", "upgrading-v3-to-v4.md": "@@ -0,0 +1,247 @@\n+---\n+title: Upgrading from v3 to v4\n+---\n+\n+> Following sections describe (hopefully) all breaking changes, most of them might be not valid \n+> for you, like if you do not use custom `NamingStrategy` implementation, you do not care about\n+> the interface being changed.\n+\n+## Node 10.13.0+ required\n+\n+Support for older node versions was dropped. \n+\n+## TypeScript 3.7+ required\n+\n+Support for older TypeScript versions was dropped. \n+\n+## Monorepo\n+\n+The ORM has been split into several packages. In v4 one needs to require\n+`@mikro-orm/core` and a driver package, e.g. `@mikro-orm/mysql`. This driver\n+package already contains the `mysql2` dependency, so you can remove that from\n+your `package.json`. 
\n+\n+- `@mikro-orm/core`\n+- `@mikro-orm/reflection` - `TsMorphMetadataProvider`\n+- `@mikro-orm/cli` - CLI support, requires entity-generator, migrator and knex\n+- `@mikro-orm/knex` - SQL support\n+- `@mikro-orm/entity-generator`\n+- `@mikro-orm/migrations`\n+- `@mikro-orm/mysql`\n+- `@mikro-orm/mariadb`\n+- `@mikro-orm/mysql-base` - Common implementation for mysql and mariadb (internal)\n+- `@mikro-orm/sqlite`\n+- `@mikro-orm/postgresql`\n+- `@mikro-orm/mongodb`\n+\n+> For easier transition, meta package mikro-orm is still present, reexporting \n+> core, reflection, migrations, entity-generator and cli packages. You should \n+> **not** install both `mikro-orm` and `@mikro-orm/core` packages together. \n+\n+> You should prefer the `@mikro-orm/core` over `mikro-orm` package, there were\n+> weird dependency issues reported with the `mikro-orm` meta-package. \n+\n+## SqlEntityManager and MongoEntityManager\n+\n+In v4 the `core` package, where `EntityManager` and `EntityRepository` are \n+defined, is not dependent on knex, and therefore it cannot have a method \n+returning a `QueryBuilder`. You need to import the SQL flavour of the EM \n+from the driver package to access the `createQueryBuilder()` method.\n+\n+> The SQL flavour of EM is actually called `SqlEntityManager`, it is exported both under \n+> this name and under `EntityManager` alias, so you can just change the \n+> location from where you import.\n+\n+```typescript\n+import { EntityManager } from '@mikro-orm/mysql'; // or any other SQL driver package\n+\n+const em: EntityManager;\n+const qb = await em.createQueryBuilder(...);\n+```\n+\n+Same applies for the `aggregate()` method in mongo driver:\n+\n+```typescript\n+import { EntityManager } from '@mikro-orm/mongodb';\n+\n+const em: EntityManager;\n+const ret = await em.aggregate(...);\n+```\n+\n+> The mongo flavour of EM is actually called `MongoEntityManager`, it is exported both under \n+> this name and under `EntityManager` alias, so you can just change the \n+> location from where you import.\n+\n+## Different default `pivotTable`\n+\n+Implementation of `UnderscoreNamingStrategy` and `EntityCaseNamingStrategy` \n+`joinTableName()` method has changed. You can use `pivotTable` on the owning side\n+of M:N relation to specify the table name manually. \n+\n+Previously the table name did not respect property name, if one defined multiple\n+M:N relations between same entities, there were conflicts and one would have to \n+specify `pivotTable` name manually at least on one of them. With the new way, \n+we can be sure that the table name won't conflict with other pivot tables. \n+\n+Previously the name was constructed from 2 entity names as `entity_a_to_entity_b`,\n+ignoring the actual property name. In v4 the name will be `entity_a_coll_name` in \n+case of the collection property on the owning side being named `collName`. \n+\n+## Changes in folder-based discovery (`entitiesDirs` removed)\n+\n+`entitiesDirs` and `entitiesDirsTs` were removed in favour of `entities` and `entitiesTs`,\n+`entities` will be used as a default for `entitiesTs` (that is used when we detect `ts-node`).\n+\n+`entities` can now contain mixture of paths to directories, globs pointing to entities,\n+or references to the entities or instances of `EntitySchema`. 
\n+\n+This basically means that all you need to change is renaming `entitiesDirs` to `entities`.\n+\n+```typescript\n+MikroORM.init({\n+ entities: ['dist/**/entities', 'dist/**/*.entity.js', FooBar, FooBaz],\n+ entitiesTs: ['src/**/entities', 'src/**/*.entity.ts', FooBar, FooBaz],\n+});\n+```\n+\n+## Changes in `wrap()` helper, `WrappedEntity` interface and `Reference` wrapper\n+\n+Previously all the methods and properties of `WrappedEntity` interface were\n+added to the entity prototype during discovery. In v4 there is only one property\n+added: `__helper: WrappedEntity`. `WrappedEntity` has been converted to actual class.\n+\n+`wrap(entity)` no longer returns the entity, now the `WrappedEntity` instance is \n+being returned. It contains only public methods (`init`, `assign`, `isInitialized`, ...),\n+if you want to access internal properties like `__meta` or `__em`, you need to explicitly\n+ask for the helper via `wrap(entity, true)`.\n+\n+Internal methods (with `__` prefix) were also removed from the `Reference` class, \n+use `wrap(ref, true)` to access them. \n+\n+Instead of interface merging with `WrappedEntity`, one can now use classic inheritance,\n+by extending `BaseEntity` exported from `@mikro-orm/core`. If you do so, `wrap(entity)` \n+will return your entity. \n+\n+## Removed `flush` parameter from `persist()` and `remove()` methods\n+\n+`flush` param is removed, both `persist` and `remove` methods are synchronous and\n+require explicit flushing, possibly via fluent interface call.\n+\n+```typescript\n+// before\n+await em.persist(jon, true);\n+await em.remove(Author, jon, true);\n+\n+// after\n+await em.persist(jon).flush();\n+await em.remove(jon).flush();\n+```\n+\n+## `remove()` method requires entity instances\n+\n+The `em.remove()` method originally allowed to pass either entity instance, or \n+a condition. When one passed a condition, it was firing a native delete query, \n+without handling transactions or hooks. \n+\n+In v4, the method is now simplified and works only with entity instances. Use \n+`em.nativeDelete()` explicitly if you want to fire a delete query instead of \n+letting the `UnitOfWork` doing its job.\n+\n+```typescript\n+// before\n+await em.remove(Author, 1); // fires query directly\n+\n+// after \n+await em.nativeDelete(Author, 1);\n+```\n+\n+> `em.removeEntity()` has been removed in favour of `em.remove()` (that now has almost the same signature).\n+\n+## Type safe references\n+\n+EM now returns `Loaded<T, P>` instead of the entity (`T`). This type automatically\n+adds synchronous method `get()` that returns the entity (for references) or array\n+of entities (for collections).\n+\n+`Reference.get()` is now available only with correct `Loaded` type hint and is used \n+as a sync getter for the entity, just like `unwrap()`. You can use `Reference.load(prop)` \n+for the original `get()` method functionality. \n+\n+`em.find()` and similar methods now have two type arguments, due to TypeScript not supporting partial\n+type inference, when you specify the `T` explicitly (without also explicitly \n+specifying the load hint), the inference will not work. This use case was mainly\n+for usage without classes (interfaces + `EntitySchema`) - in that case it is now \n+supported to pass actual instance of `EntitySchema` as the first parameter to these\n+methods, that will allow correct type inference:\n+\n+```typescript\n+const author = await em.findOne(AuthorSchema, { ... 
}, ['books']);\n+console.log(author.books.get()); // `get()` is now inferred correctly\n+```\n+\n+## Custom types are now type safe\n+\n+Generic `Type` class has now two type arguments - the input and output types. \n+Input type defaults to `string`, output type defaults to the input type. \n+\n+You might need to explicitly provide the types if your methods are strictly typed.\n+\n+## Custom type serialization\n+\n+Custom types used to be serialized to the database value. In v4, the runtime \n+value is used by default. Implement custom `toJSON()` method if you need to \n+customize this.\n+\n+## Property `default` and `defaultRaw`\n+\n+Previously the `default` option of properties was used as is, so we had to wrap \n+strings in quotes (e.g. `@Property({ default: \"'foo bar'\" })`). \n+\n+In v4 the `default` is typed as `string | number | boolean | null` and when used\n+with string value, it will be automatically quoted. \n+\n+To use SQL functions we now need to use `defaultRaw`: `@Property({ defaultRaw: 'now()' })`.\n+\n+## `autoFlush` option has been removed\n+\n+Also `persistLater()` and `removeLater()` methods are deprecated. Use `persist()` or\n+`remove` respectively.\n+\n+## `IdEntity`, `UuidEntity` and `MongoEntity` interfaces are removed\n+\n+They were actually never needed. \n+\n+## MongoDB driver is no longer the default\n+\n+You need to specify the platform type either via `type` option or provide the driver\n+implementation via `driver` option. \n+\n+Available platforms types: `[ 'mongo', 'mysql', 'mariadb', 'postgresql', 'sqlite' ]`\n+\n+## Removed configuration `discovery.tsConfigPath`\n+\n+Removed as it is no longer needed, it was used only for `TsMorphMetadataProvider`,\n+when the `entitiesDirsTs` were not explicitly provided. In v4, this is no longer\n+needed, as ts-morph discovery will use `d.ts` files instead, that should be located\n+next to the compiled entities. 
\n+\n+## Changes in query highlighting\n+\n+Previously Highlight.js was used to highlight various things in the CLI, \n+like SQL and mongo queries, or migrations or entities generated via CLI.\n+While the library worked fine, it was causing performance issues mainly \n+for those bundling via webpack and using lambdas, as the library was huge.\n+\n+In v4 highlighting is disabled by default, and there are 2 highlighters \n+you can optionally use (you need to install them first).\n+\n+```typescript\n+import { SqlHighlighter } from '@mikro-orm/sql-highlighter';\n+\n+MikroORM.init({\n+ highlighter: new SqlHighlighter(),\n+ // ...\n+});\n+```\n+\n+For MongoDB you can use `@mikro-orm/mongo-highlighter`.\n", "usage-with-js.md": "@@ -0,0 +1,98 @@\n+---\n+title: Usage with JavaScript\n+sidebar_label: Usage with Vanilla JS\n+---\n+\n+Since MikroORM 3.2, we can use `EntitySchema` helper to define own entities without \n+decorators, which works also for Vanilla JavaScript.\n+\n+> Read more about `EntitySchema` in [this section](entity-schema.md).\n+\n+Here is an example of such entity:\n+\n+```javascript title=\"./entities/Author.js\"\n+const { Collection, EntitySchema } = require('@mikro-orm/core');\n+const { Book } = require('./Book');\n+const { BaseEntity } = require('./BaseEntity');\n+\n+/**\n+ * @property {number} id\n+ * @property {Date} createdAt\n+ * @property {Date} updatedAt\n+ * @property {string} name\n+ * @property {string} email\n+ * @property {number} age\n+ * @property {boolean} termsAccepted\n+ * @property {string[]} identities\n+ * @property {Date} born\n+ * @property {Collection<Book>} books\n+ * @property {Book} favouriteBook\n+ * @property {number} version\n+ * @property {string} versionAsString\n+ */\n+class Author extends BaseEntity {\n+\n+ /**\n+ * @param {string} name\n+ * @param {string} email\n+ */\n+ constructor(name, email) {\n+ super();\n+ this.name = name;\n+ this.email = email;\n+ this.books = new Collection(this);\n+ this.createdAt = new Date();\n+ this.updatedAt = new Date();\n+ this.termsAccepted = false;\n+ }\n+\n+}\n+\n+export const schema = new EntitySchema({\n+ class: Author,\n+ properties: {\n+ name: { type: 'string' },\n+ email: { type: 'string', unique: true },\n+ age: { type: 'number', nullable: true },\n+ termsAccepted: { type: 'boolean', default: 0, onCreate: () => false },\n+ identities: { type: 'string[]', nullable: true },\n+ born: { type: DateType, nullable: true, length: 3 },\n+ books: { reference: '1:m', entity: () => 'Book', mappedBy: book => book.author },\n+ favouriteBook: { reference: 'm:1', type: 'Book' },\n+ version: { type: 'number', persist: false },\n+ },\n+});\n+\n+module.exports.Author = Author;\n+module.exports.entity = Author;\n+module.exports.schema = schema;\n+```\n+\n+> Do not forget to provide `name` and `path` schema parameters as well as `entity` \n+> and `schema` exports.\n+\n+Reference parameter can be one of (where `SCALAR` is the default one):\n+\n+```typescript\n+export enum ReferenceType {\n+ SCALAR = 'scalar',\n+ ONE_TO_ONE = '1:1',\n+ MANY_TO_ONE = 'm:1',\n+ ONE_TO_MANY = '1:m',\n+ MANY_TO_MANY = 'm:n',\n+ EMBEDDED = 'embedded',\n+}\n+```\n+\n+You can register your entities as usual.\n+\n+```javascript\n+const orm = await MikroORM.init({\n+ entities: [Author, Book, BookTag, BaseEntity],\n+ dbName: 'my-db-name',\n+ type: 'mysql',\n+});\n+```\n+\n+For more examples of plain JavaScript entity definitions take a look\n+[Express JavaScript example](https://github.com/mikro-orm/express-js-example-app). 
\n", "usage-with-mongo.md": "@@ -0,0 +1,163 @@\n+---\n+title: Usage with MongoDB\n+---\n+\n+To use `mikro-orm` with mongo database, do not forget to install `mongodb` dependency. As `MongoDriver`\n+is the default one, you do not need to provide it.\n+\n+Then call `MikroORM.init` as part of bootstrapping your app:\n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: [Author, Book, ...],\n+ dbName: 'my-db-name',\n+ clientUrl: '...',\n+ type: 'mongo',\n+});\n+```\n+\n+## Defining entity\n+\n+When defining entity, do not forget to define primary key like this:\n+\n+```typescript\n+@PrimaryKey()\n+_id: ObjectId;\n+\n+@SerializedPrimaryKey()\n+id!: string; // won't be saved in the database\n+```\n+\n+> Only `_id: ObjectId` will be saved in the database. `id: string` is virtual and is \n+> also optional. \n+\n+## ObjectId and string id duality\n+\n+Every entity has both `ObjectId` and `string` id available, also all methods of `EntityManager` \n+and `EntityRepository` supports querying by both of them. \n+\n+```typescript\n+const author = orm.em.getReference('...id...');\n+console.log(author.id); // returns '...id...'\n+console.log(author._id); // returns ObjectId('...id...')\n+\n+// all of those will return the same results\n+const article = '...article id...'; // string id\n+const book = '...book id...'; // string id\n+const repo = orm.em.getRepository(Author);\n+const foo1 = await repo.find({ id: { $in: [article] }, favouriteBook: book });\n+const bar1 = await repo.find({ id: { $in: [new ObjectId(article)] }, favouriteBook: new ObjectId(book) });\n+const foo2 = await repo.find({ _id: { $in: [article] }, favouriteBook: book });\n+const bar2 = await repo.find({ _id: { $in: [new ObjectId(article)] }, favouriteBook: new ObjectId(book) });\n+```\n+\n+## ManyToMany collections with inlined pivot array\n+\n+As opposed to SQL drivers that use pivot tables, in mongo we can leverage available array type\n+to store array of collection items (identifiers). This approach has two main benefits:\n+\n+1. Collection is stored on owning side entity, so we know how many items are there even before\n+initializing the collection.\n+2. As there are no pivot tables, resulting database queries are much simpler.\n+\n+## Transactions\n+\n+Starting with v3.4, MongoDB driver supports transactions. To use transactions, there\n+are several things you need to respect:\n+\n+- you need to use replica set (see [run-rs](https://github.com/vkarpov15/run-rs))\n+- implicit transactions are disabled by default\n+ - use `implicitTransactions: true` to enable them globally\n+ - or use explicit transaction demarcation via `em.transactional()`\n+- you need to explicitly create all collections before working with them\n+ - use `em.getDriver().createCollections()` method to do so\n+\n+```sh\n+# first create replica set\n+$ run-rs -v 4.2.3\n+```\n+\n+```typescript\n+import { MikroORM } from '@mikro-orm/core';\n+import { MongoDriver } from '@mikro-orm/mongodb';\n+\n+// make sure to provide the MongoDriver type hint\n+const orm = await MikroORM.init<MongoDriver>({\n+ entities: [Author, Book, ...],\n+ clientUrl: 'mongodb://localhost:27017,localhost:27018,localhost:27019/my-db-name?replicaSet=rs0',\n+ type: 'mongo',\n+ implicitTransactions: true, // defaults to false\n+});\n+\n+await orm.em.getDriver().createCollections();\n+```\n+\n+> The `createCollections` method is present on the `MongoDriver` class only. 
You need \n+> to have the entity manager correctly typed (as `EntityManager<MongoDriver>`).\n+\n+## Indexes\n+\n+Starting with v3.4, MongoDB driver supports indexes and unique constraints. You can \n+use `@Index()` and `@Unique()` as described in [Defining Entities section](defining-entities.md#indexes).\n+To automatically create new indexes when initializing the ORM, you need to enable\n+`ensureIndexes` option. \n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: [Author, Book, ...],\n+ dbName: 'my-db-name',\n+ type: 'mongo',\n+ ensureIndexes: true, // defaults to false\n+});\n+``` \n+\n+Alternatively you can call `ensureIndexes()` method on the `MongoDriver`:\n+\n+```typescript\n+await orm.em.getDriver().ensureIndexes();\n+```\n+\n+> You can pass additional index/unique options via `options` parameter:\n+> \n+> `@Unique({ options: { partialFilterExpression: { name: { $exists: true } } }})`\n+\n+> You can also create text indexes by passing `type` parameter:\n+> \n+> `@Index({ properties: ['name', 'caption'], type: 'text' })`\n+\n+> If you provide only `options` in the index definition, it will be used as is, \n+> this allows to define any kind of index:\n+>\n+> `@Index({ options: { point: '2dsphere', title: -1 } })` \n+\n+## Native collection methods\n+\n+Sometimes you need to perform some bulk operation, or you just want to populate your\n+database with initial fixtures. Using ORM for such operations can bring unnecessary\n+boilerplate code. In this case, you can use one of `nativeInsert/nativeUpdate/nativeDelete`\n+methods:\n+\n+```typescript\n+em.nativeInsert<T extends AnyEntity>(entityName: string, data: any): Promise<IPrimaryKey>;\n+em.nativeUpdate<T extends AnyEntity>(entityName: string, where: FilterQuery<T>, data: any): Promise<number>;\n+em.nativeDelete<T extends AnyEntity>(entityName: string, where: FilterQuery<T> | any): Promise<number>;\n+```\n+\n+Those methods execute native driver methods like Mongo's `insertOne/updateMany/deleteMany` collection methods respectively. \n+This is common interface for all drivers, so for MySQL driver, it will fire native SQL queries. \n+Keep in mind that they do not hydrate results to entities, and they do not trigger lifecycle hooks. 
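\n+\n+For example, a bulk update could look like the following sketch (entity and field values are illustrative):\n+\n+```typescript\n+// fires a single native update query and returns the number of affected documents/rows\n+const affected = await orm.em.nativeUpdate(Author, { name: 'Jon' }, { name: 'Jon Snow' });\n+\n+// no entities are hydrated and no lifecycle hooks are triggered here\n+console.log(affected); // e.g. 1\n+```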
\n+\n+They are also available as `EntityRepository` shortcuts:\n+\n+```typescript\n+EntityRepository.nativeInsert(data: any): Promise<IPrimaryKey>;\n+EntityRepository.nativeUpdate(where: FilterQuery<T>, data: any): Promise<number>;\n+EntityRepository.nativeDelete(where: FilterQuery<T> | any): Promise<number>;\n+```\n+\n+There is also shortcut for calling `aggregate` method:\n+\n+```typescript\n+em.aggregate(entityName: string, pipeline: any[]): Promise<any[]>;\n+EntityRepository.aggregate(pipeline: any[]): Promise<any[]>;\n+```\n", "usage-with-nestjs.md": "@@ -0,0 +1,266 @@\n+---\n+title: Using MikroORM with NestJS framework\n+sidebar_label: Usage with NestJS\n+---\n+\n+## Installation\n+\n+Easiest way to integrate MikroORM to Nest is via [`@mikro-orm/nestjs` module](https://github.com/mikro-orm/nestjs).\n+Simply install it next to Nest, MikroORM and underlying driver: \n+\n+```bash\n+$ yarn add @mikro-orm/core @mikro-orm/nestjs @mikro-orm/mongodb # for mongo\n+$ yarn add @mikro-orm/core @mikro-orm/nestjs @mikro-orm/mysql # for mysql/mariadb\n+$ yarn add @mikro-orm/core @mikro-orm/nestjs @mikro-orm/mariadb # for mysql/mariadb\n+$ yarn add @mikro-orm/core @mikro-orm/nestjs @mikro-orm/postgresql # for postgresql\n+$ yarn add @mikro-orm/core @mikro-orm/nestjs @mikro-orm/sqlite # for sqlite\n+```\n+\n+or\n+\n+```bash\n+$ npm i -s @mikro-orm/core @mikro-orm/nestjs @mikro-orm/mongodb # for mongo\n+$ npm i -s @mikro-orm/core @mikro-orm/nestjs @mikro-orm/mysql # for mysql/mariadb\n+$ npm i -s @mikro-orm/core @mikro-orm/nestjs @mikro-orm/mariadb # for mysql/mariadb\n+$ npm i -s @mikro-orm/core @mikro-orm/nestjs @mikro-orm/postgresql # for postgresql\n+$ npm i -s @mikro-orm/core @mikro-orm/nestjs @mikro-orm/sqlite # for sqlite\n+```\n+\n+Once the installation process is completed, we can import the `MikroOrmModule` into the root `AppModule`.\n+\n+```typescript\n+@Module({\n+ imports: [\n+ MikroOrmModule.forRoot({\n+ entities: ['./dist/entities'],\n+ entitiesTs: ['./src/entities'],\n+ dbName: 'my-db-name.sqlite3',\n+ type: 'sqlite',\n+ }),\n+ ],\n+ controllers: [AppController],\n+ providers: [AppService],\n+})\n+export class AppModule {}\n+```\n+\n+The `forRoot()` method accepts the same configuration object as `init()` from the MikroORM package. \n+You can also omit the parameter to use the CLI config.\n+\n+Afterward, the `EntityManager` will be available to inject across entire project (without importing any module elsewhere).\n+\n+```ts\n+@Injectable()\n+export class MyService {\n+\n+ constructor(private readonly orm: MikroORM,\n+ private readonly em: EntityManager) { }\n+\n+}\n+```\n+\n+To define which repositories shall be registered in the current scope you can use the `forFeature()` method. For example, in this way:\n+\n+> You should **not** register your base entities via `forFeature()`, as there are no\n+> repositories for those. 
On the other hand, base entities need to be part of the list\n+> in `forRoot()` (or in the ORM config in general).\n+\n+```typescript\n+// photo.module.ts\n+\n+@Module({\n+ imports: [MikroOrmModule.forFeature([Photo])],\n+ providers: [PhotoService],\n+ controllers: [PhotoController],\n+})\n+export class PhotoModule {}\n+```\n+\n+and import it into the root `AppModule`:\n+\n+```typescript\n+// app.module.ts\n+@Module({\n+ imports: [MikroOrmModule.forRoot(...), PhotoModule],\n+})\n+export class AppModule {}\n+```\n+\n+In this way we can inject the `PhotoRepository` to the `PhotoService` using the `@InjectRepository()` decorator:\n+\n+```typescript\n+@Injectable()\n+export class PhotoService {\n+ constructor(\n+ @InjectRepository(Photo)\n+ private readonly photoRepository: EntityRepository<Photo>\n+ ) {}\n+\n+ // ...\n+\n+}\n+```\n+\n+## Auto entities automatically\n+\n+> `autoLoadEntities` option was added in v4.1.0 \n+\n+Manually adding entities to the entities array of the connection options can be \n+tedious. In addition, referencing entities from the root module breaks application \n+domain boundaries and causes leaking implementation details to other parts of the \n+application. To solve this issue, static glob paths can be used.\n+\n+Note, however, that glob paths are not supported by webpack, so if you are building \n+your application within a monorepo, you won't be able to use them. To address this \n+issue, an alternative solution is provided. To automatically load entities, set the \n+`autoLoadEntities` property of the configuration object (passed into the `forRoot()` \n+method) to `true`, as shown below: \n+\n+```ts\n+@Module({\n+ imports: [\n+ MikroOrmModule.forRoot({\n+ ...\n+ autoLoadEntities: true,\n+ }),\n+ ],\n+})\n+export class AppModule {}\n+```\n+\n+With that option specified, every entity registered through the `forFeature()` \n+method will be automatically added to the entities array of the configuration \n+object.\n+\n+> Note that entities that aren't registered through the `forFeature()` method, but \n+> are only referenced from the entity (via a relationship), won't be included by \n+> way of the `autoLoadEntities` setting.\n+\n+> Using `autoLoadEntities` also has no effect on the MikroORM CLI - for that we \n+> still need CLI config with the full list of entities. On the other hand, we can\n+> use globs there, as the CLI won't go thru webpack.\n+\n+## Request scoped handlers in queues\n+\n+> `@UseRequestContext()` decorator was added in v4.1.0 \n+\n+As mentioned in the docs, we need a clean state for each request. That is handled\n+automatically thanks to the `RequestContext` helper registered via middleware. \n+\n+But middlewares are executed only for regular HTTP request handles, what if we need\n+a request scoped method outside of that? One example of that is queue handlers or \n+scheduled tasks. \n+\n+We can use the `@UseRequestContext()` decorator. It requires you to first inject the\n+`MikroORM` instance to current context, it will be then used to create the context \n+for you. Under the hood, the decorator will register new request context for your \n+method and execute it inside the context. \n+\n+```ts\n+@Injectable()\n+export class MyService {\n+\n+ constructor(private readonly orm: MikroORM) { }\n+\n+ @UseRequestContext()\n+ async doSomething() {\n+ // this will be executed in a separate context\n+ }\n+\n+}\n+```\n+\n+## Using `AsyncLocalStorage` for request context\n+\n+By default, `domain` api use used in the `RequestContext` helper. 
Since `@mikro-orm/[email protected]`,\n+you can use the new `AsyncLocalStorage` too, if you are on up to date node version:\n+\n+```typescript\n+// create new (global) storage instance\n+const storage = new AsyncLocalStorage<EntityManager>();\n+\n+@Module({\n+ imports: [\n+ MikroOrmModule.forRoot({\n+ // ...\n+ registerRequestContext: false, // disable automatatic middleware\n+ context: () => storage.getStore(), // use our AsyncLocalStorage instance\n+ }),\n+ ],\n+ controllers: [AppController],\n+ providers: [AppService],\n+})\n+export class AppModule {}\n+\n+// register the request context middleware\n+const app = await NestFactory.create(AppModule, { ... });\n+\n+app.use((req, res, next) => {\n+ storage.run(orm.em.fork(true, true), next);\n+});\n+```\n+\n+## Using custom repositories\n+\n+When using custom repositories, we can get around the need for `@InjectRepository()`\n+decorator by naming our repositories the same way as `getRepositoryToken()` method do:\n+\n+```ts\n+export const getRepositoryToken = <T> (entity: EntityName<T>) => `${Utils.className(entity)}Repository`;\n+```\n+\n+In other words, as long as we name the repository same was as the entity is called, \n+appending `Repository` suffix, the repository will be registered automatically in \n+the Nest.js DI container.\n+\n+`**./author.entity.ts**`\n+\n+```ts\n+@Entity()\n+export class Author {\n+\n+ // to allow inference in `em.getRepository()`\n+ [EntityRepositoryType]?: AuthorRepository;\n+\n+}\n+```\n+\n+`**./author.repository.ts**`\n+\n+```ts\n+@Repository(Author)\n+export class AuthorRepository extends EntityRepository<Author> {\n+\n+ // your custom methods...\n+\n+}\n+```\n+\n+As the custom repository name is the same as what `getRepositoryToken()` would\n+return, we do not need the `@InjectRepository()` decorator anymore:\n+\n+```ts\n+@Injectable()\n+export class MyService {\n+\n+ constructor(private readonly repo: AuthorRepository) { }\n+\n+}\n+```\n+\n+## Testing\n+\n+The `@mikro-orm/nestjs` package exposes `getRepositoryToken()` function that returns prepared token based on a given entity to allow mocking the repository.\n+\n+```typescript\n+@Module({\n+ providers: [\n+ PhotoService,\n+ {\n+ provide: getRepositoryToken(Photo),\n+ useValue: mockedRepository,\n+ },\n+ ],\n+})\n+export class PhotoModule {}\n+```\n", "usage-with-sql.md": "@@ -0,0 +1,184 @@\n+---\n+title: Usage with MySQL, MariaDB, PostgreSQL or SQLite\n+sidebar_label: Usage with SQL Drivers\n+---\n+\n+To use `mikro-orm` with MySQL database, do not forget to install `mysql2` dependency and set \n+the type option to `mysql` when initializing ORM.\n+\n+Similarly for SQLite install `sqlite` dependency and provide `sqlite` database type. For \n+PostgreSQL install `pg` and provide `postgresql` type.\n+\n+Then call `MikroORM.init` as part of bootstrapping your app:\n+\n+```typescript\n+const orm = await MikroORM.init({\n+ entities: [Author, Book, ...],\n+ dbName: 'my-db-name',\n+ type: 'mysql', // or 'sqlite' or 'postgresql' or 'mariadb'\n+});\n+```\n+\n+## Custom driver\n+\n+If you want to use database that is not currently supported, you can implement your own driver.\n+More information about how to create one can be [found here](custom-driver.md). 
Then provide the \n+driver class via `driver` configuration option: \n+\n+```typescript\n+import { MyCustomDriver } from './MyCustomDriver.ts';\n+\n+const orm = await MikroORM.init({\n+ entities: [Author, Book, ...],\n+ dbName: 'my-db-name',\n+ driver: MyCustomDriver, // provide the class, not just its name\n+});\n+```\n+\n+## Schema\n+\n+Currently you will need to maintain the database schema yourself. For initial dump, you can \n+use [`SchemaGenerator` helper](schema-generator.md). \n+\n+## ManyToMany collections with pivot tables\n+\n+As opposed to `MongoDriver`, in MySQL we use pivot tables to handle `ManyToMany` relations:\n+\n+```sql\n+CREATE TABLE `publisher_to_test` (\n+ `id` int(11) unsigned NOT NULL AUTO_INCREMENT,\n+ `publisher_id` int(11) DEFAULT NULL,\n+ `test_id` int(11) DEFAULT NULL,\n+ PRIMARY KEY (`id`)\n+) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n+```\n+\n+You can adjust the name of pivot table via `pivotTable` option in `@ManyToMany` decorator\n+defined on owning side: \n+\n+```typescript\n+// for unidirectional\n+@ManyToMany({ entity: () => Test, owner: true, pivotTable: 'publisher2test' })\n+tests = new Collection<Test>(this);\n+\n+// for bidirectional\n+@ManyToMany({ entity: () => BookTag, inversedBy: 'books', pivotTable: 'book2tag' })\n+tags = new Collection<BookTag>(this);\n+```\n+\n+## Using QueryBuilder to execute native SQL queries\n+\n+When you need to execute some SQL query without all the ORM stuff involved, you can either\n+compose the query yourself, or use the `QueryBuilder` helper to construct the query for you:\n+\n+```typescript\n+const qb = orm.em.createQueryBuilder(Author);\n+qb.update({ name: 'test 123', type: PublisherType.GLOBAL }).where({ id: 123, type: PublisherType.LOCAL });\n+\n+console.log(qb.getQuery());\n+// 'UPDATE `publisher2` SET `name` = ?, `type` = ? WHERE `id` = ? AND `type` = ?'\n+\n+console.log(qb.getParams());\n+// ['test 123', PublisherType.GLOBAL, 123, PublisherType.LOCAL]\n+\n+// run the query\n+const res1 = await qb.execute();\n+\n+// or run query without using QueryBuilder\n+const driver = orm.em.getDriver();\n+const res2 = await driver.execute('SELECT ? 
+ ?', [1, 2]);\n+```\n+\n+`QueryBuilder` provides fluent interface with these methods:\n+\n+```typescript\n+QueryBuilder.select(fields: string | string[], distinct?: boolean): QueryBuilder;\n+QueryBuilder.insert(data: any): QueryBuilder;\n+QueryBuilder.update(data: any): QueryBuilder;\n+QueryBuilder.delete(cond: any): QueryBuilder;\n+QueryBuilder.count(fields: string | string[], distinct?: boolean): QueryBuilder;\n+QueryBuilder.join(field: string, alias?: string): QueryBuilder;\n+QueryBuilder.leftJoin(field: string, alias?: string): QueryBuilder;\n+QueryBuilder.where(cond: any, operator: '$and' | '$or'): QueryBuilder;\n+QueryBuilder.andWhere(cond: any): QueryBuilder;\n+QueryBuilder.orWhere(cond: any): QueryBuilder;\n+QueryBuilder.groupBy(fields: string | string[]): QueryBuilder;\n+QueryBuilder.having(cond: any): QueryBuilder;\n+QueryBuilder.populate(populate: string[]): QueryBuilder;\n+QueryBuilder.limit(limit: number, offset?: number): QueryBuilder;\n+QueryBuilder.offset(offset: number): QueryBuilder;\n+QueryBuilder.getQuery(): string;\n+QueryBuilder.getParams(): any;\n+QueryBuilder.clone(): QueryBuilder;\n+```\n+\n+For more examples of how to work with `QueryBuilder`, take a look at `QueryBuilder` tests in \n+[`tests/QueryBuilder.test.ts`](https://github.com/mikro-orm/mikro-orm/blob/master/tests/QueryBuilder.test.ts).\n+\n+## Transactions\n+\n+When you call `em.flush()`, all computed changes are queried [inside a database\n+transaction](unit-of-work.md) by default, so you do not have to handle transactions manually. \n+\n+When you need to explicitly handle the transaction, you can use `em.transactional(cb)` \n+to run callback in transaction. It will provide forked `EntityManager` as a parameter \n+with clear isolated identity map - please use that to make changes. \n+\n+```typescript\n+// if an error occurs inside the callback, all db queries from inside the callback will be rolled back\n+await orm.em.transactional(async (em: EntityManager) => {\n+ const god = new Author('God', '[email protected]');\n+ await em.persistAndFlush(god);\n+});\n+```\n+\n+## LIKE Queries\n+\n+SQL supports LIKE queries via native JS regular expressions:\n+\n+```typescript\n+const author1 = new Author2('Author 1', '[email protected]');\n+const author2 = new Author2('Author 2', '[email protected]');\n+const author3 = new Author2('Author 3', '[email protected]');\n+await orm.em.persistAndFlush([author1, author2, author3]);\n+\n+// finds authors with email like '%exa%le.c_m'\n+const authors = await orm.em.find(Author2, { email: /exa.*le\\.c.m$/ }); \n+console.log(authors); // all 3 authors found\n+```\n+\n+## Native Collection Methods\n+\n+Sometimes you need to perform some bulk operation, or you just want to populate your\n+database with initial fixtures. Using ORM for such operations can bring unnecessary\n+boilerplate code. In this case, you can use one of `nativeInsert/nativeUpdate/nativeDelete`\n+methods:\n+\n+```typescript\n+em.nativeInsert<T extends AnyEntity>(entityName: string, data: any): Promise<IPrimaryKey>;\n+em.nativeUpdate<T extends AnyEntity>(entityName: string, where: FilterQuery<T>, data: any): Promise<number>;\n+em.nativeDelete<T extends AnyEntity>(entityName: string, where: FilterQuery<T> | any): Promise<number>;\n+```\n+\n+Those methods execute native SQL queries generated via `QueryBuilder` based on entity \n+metadata. Keep in mind that they do not hydrate results to entities, and they do not \n+trigger lifecycle hooks. 
\n+\n+They are also available as `EntityRepository` shortcuts:\n+\n+```typescript\n+EntityRepository.nativeInsert(data: any): Promise<IPrimaryKey>;\n+EntityRepository.nativeUpdate(where: FilterQuery<T>, data: any): Promise<number>;\n+EntityRepository.nativeDelete(where: FilterQuery<T> | any): Promise<number>;\n+```\n+\n+Additionally there is `execute()` method that supports executing raw SQL queries or `QueryBuilder`\n+instances. To create `QueryBuilder`, you can use `createQueryBuilder()` factory method on both \n+`EntityManager` and `EntityRepository` classes: \n+\n+```typescript\n+const qb = em.createQueryBuilder('Author');\n+qb.select('*').where({ id: { $in: [...] } });\n+const res = await em.getDriver().execute(qb);\n+console.log(res); // unprocessed result of underlying database driver\n+```\n", "using-bigint-pks.md": "@@ -0,0 +1,41 @@\n+---\n+title: Using native BigInt PKs (MySQL and PostgreSQL)\n+---\n+\n+You can use `BigIntType` to support `bigint`s. By default it will represent the value as\n+a `string`. \n+\n+```typescript\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey({ type: BigIntType })\n+ id: string;\n+\n+}\n+```\n+\n+If you want to use native `bigint` type, you will need to create new type based on the\n+`BigIntType`:\n+\n+```typescript\n+export class NativeBigIntType extends BigIntType {\n+\n+ convertToJSValue(value: any): any {\n+ if (!value) {\n+ return value;\n+ }\n+\n+ return BigInt(value);\n+ }\n+\n+}\n+\n+@Entity()\n+export class Book {\n+\n+ @PrimaryKey({ type: NativeBigIntType })\n+ id: bigint;\n+\n+}\n+```\n", "version-4.2-sidebars.json": "@@ -0,0 +1,292 @@\n+{\n+ \"version-4.2/docs\": [\n+ {\n+ \"collapsed\": true,\n+ \"type\": \"category\",\n+ \"label\": \"Overview\",\n+ \"items\": [\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/installation\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/defining-entities\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/relationships\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/entity-manager\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/unit-of-work\"\n+ }\n+ ]\n+ },\n+ {\n+ \"collapsed\": true,\n+ \"type\": \"category\",\n+ \"label\": \"Fundamentals\",\n+ \"items\": [\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/identity-map\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/entity-references\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/collections\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/repositories\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/transactions\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/inheritance-mapping\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/cascading\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/filters\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/deployment\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/query-builder\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/caching\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/debugging\"\n+ }\n+ ]\n+ },\n+ {\n+ \"collapsed\": true,\n+ \"type\": \"category\",\n+ \"label\": \"Advanced Features\",\n+ \"items\": [\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/nested-populate\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/query-conditions\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/propagation\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/loading-strategies\"\n+ },\n+ {\n+ \"type\": 
\"doc\",\n+ \"id\": \"version-4.2/serializing\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/entity-helper\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/lifecycle-hooks\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/composite-keys\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/custom-types\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/embeddables\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/entity-schema\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/metadata-providers\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/metadata-cache\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/schema-generator\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/entity-generator\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/naming-strategy\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/property-validation\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/migrations\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/read-connections\"\n+ }\n+ ]\n+ },\n+ {\n+ \"collapsed\": true,\n+ \"type\": \"category\",\n+ \"label\": \"Reference\",\n+ \"items\": [\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/entity-manager-api\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/repositories-api\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/query-builder-api\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/decorators\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/configuration\"\n+ }\n+ ]\n+ },\n+ {\n+ \"collapsed\": true,\n+ \"type\": \"category\",\n+ \"label\": \"Usage with Different Drivers\",\n+ \"items\": [\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/usage-with-sql\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/usage-with-mongo\"\n+ }\n+ ]\n+ },\n+ {\n+ \"collapsed\": true,\n+ \"type\": \"category\",\n+ \"label\": \"Recipes\",\n+ \"items\": [\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/quick-start\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/usage-with-nestjs\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/usage-with-js\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/entity-constructors\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/multiple-schemas\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/using-bigint-pks\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/async-local-storage\"\n+ },\n+ {\n+ \"type\": \"doc\",\n+ \"id\": \"version-4.2/custom-driver\"\n+ }\n+ ]\n+ },\n+ {\n+ \"collapsed\": true,\n+ \"type\": \"category\",\n+ \"label\": \"Example Integrations\",\n+ \"items\": [\n+ {\n+ \"type\": \"link\",\n+ \"label\": \"Express + MongoDB + TypeScript\",\n+ \"href\": \"https://github.com/mikro-orm/express-ts-example-app\"\n+ },\n+ {\n+ \"type\": \"link\",\n+ \"label\": \"NestJS + MySQL + TypeScript\",\n+ \"href\": \"https://github.com/mikro-orm/nestjs-example-app\"\n+ },\n+ {\n+ \"type\": \"link\",\n+ \"label\": \"RealWorld example app (Nest + MySQL)\",\n+ \"href\": \"https://github.com/mikro-orm/nestjs-realworld-example-app\"\n+ },\n+ {\n+ \"type\": \"link\",\n+ \"label\": \"Express + MongoDB + JavaScript\",\n+ \"href\": \"https://github.com/mikro-orm/express-js-example-app\"\n+ },\n+ {\n+ \"type\": \"link\",\n+ \"label\": \"Koa + SQLite + JavaScript\",\n+ \"href\": \"https://github.com/mikro-orm/koa-ts-example-app\"\n+ },\n+ {\n+ \"type\": \"link\",\n+ \"label\": \"Inversify + 
PostgreSQL\",\n+ \"href\": \"https://github.com/PodaruDragos/inversify-example-app\"\n+ },\n+ {\n+ \"type\": \"link\",\n+ \"label\": \"NextJS + MySQL\",\n+ \"href\": \"https://github.com/jonahallibone/mikro-orm-nextjs\"\n+ }\n+ ]\n+ }\n+ ]\n+}\n", "versions.json": "@@ -1,4 +1,5 @@\n [\n+ \"4.2\",\n \"4.1\",\n \"4.0\",\n \"3.6\",\n"}
test(refactor): remove unused markers; generalize `min_version` marker
6219d6caee19b6fd3171983c49cd8d6872e3564b
test
https://github.com/rohankumardubey/ibis/commit/6219d6caee19b6fd3171983c49cd8d6872e3564b
remove unused markers; generalize `min_version` marker
{"conftest.py": "@@ -1,5 +1,6 @@\n from __future__ import annotations\n \n+import importlib\n import importlib.metadata\n import os\n import platform\n@@ -10,6 +11,7 @@ from typing import TYPE_CHECKING, Any, Iterable, Iterator, TextIO\n import _pytest\n import pandas as pd\n import sqlalchemy as sa\n+from packaging.version import parse as vparse\n \n if TYPE_CHECKING:\n import pyarrow as pa\n@@ -343,7 +345,6 @@ def _get_backends_to_test(\n \n def pytest_runtest_call(item):\n \"\"\"Dynamically add various custom markers.\"\"\"\n- nodeid = item.nodeid\n backend = [\n backend.name()\n for key, backend in item.funcargs.items()\n@@ -364,25 +365,31 @@ def pytest_runtest_call(item):\n \n backend = next(iter(backend))\n \n- for marker in item.iter_markers(name=\"skip_backends\"):\n- if backend in marker.args[0]:\n- pytest.skip(f\"skip_backends: {backend} {nodeid}\")\n-\n- for marker in item.iter_markers(name='min_spark_version'):\n- min_version = marker.args[0]\n- if backend == 'pyspark':\n- from distutils.version import LooseVersion\n-\n- import pyspark\n-\n- if LooseVersion(pyspark.__version__) < LooseVersion(min_version):\n- item.add_marker(\n- pytest.mark.xfail(\n- reason=f'Require minimal spark version {min_version}, '\n- f'but is {pyspark.__version__}',\n- **marker.kwargs,\n- )\n+ for marker in item.iter_markers(name=\"min_version\"):\n+ kwargs = marker.kwargs\n+ if backend not in kwargs:\n+ continue\n+\n+ min_version = kwargs.pop(backend)\n+ reason = kwargs.pop(\"reason\", None)\n+ version = getattr(\n+ importlib.import_module(backend), \"__version__\", None\n+ )\n+ if condition := version is None: # pragma: no cover\n+ if reason is None:\n+ reason = (\n+ f\"{backend} backend module has no __version__ attribute\"\n+ )\n+ else:\n+ condition = vparse(version) < vparse(min_version)\n+ if reason is None:\n+ reason = (\n+ f\"test requires {backend}>={version}; \"\n+ f\"got version {version}\"\n )\n+ else:\n+ reason = f\"{backend}@{version} (<{min_version}): {reason}\"\n+ item.add_marker(pytest.mark.xfail(condition, reason=reason, **kwargs))\n \n # Ibis hasn't exposed existing functionality\n # This xfails so that you know when it starts to pass\n", "test_generic.py": "@@ -1,5 +1,4 @@\n import decimal\n-import importlib\n import io\n import operator\n from contextlib import redirect_stdout\n@@ -8,7 +7,6 @@ import numpy as np\n import pandas as pd\n import pytest\n import toolz\n-from packaging.version import parse as vparse\n from pytest import param\n \n import ibis\n@@ -17,16 +15,6 @@ import ibis.util as util\n from ibis import _\n from ibis import literal as L\n \n-try:\n- import duckdb\n-except ImportError:\n- duckdb = None\n-\n-try:\n- import datafusion\n-except ImportError:\n- datafusion = None\n-\n \n @pytest.mark.parametrize(\n ('expr', 'expected'),\n@@ -38,7 +26,7 @@ except ImportError:\n ],\n )\n @pytest.mark.notimpl([\"datafusion\"])\n-def test_fillna_nullif(backend, con, expr, expected):\n+def test_fillna_nullif(con, expr, expected):\n if expected is None:\n # The exact kind of null value used differs per backend (and version).\n # Example 1: Pandas returns np.nan while BigQuery returns None.\n@@ -119,7 +107,7 @@ def test_fillna(backend, alltypes):\n ),\n ],\n )\n-def test_coalesce(backend, con, expr, expected):\n+def test_coalesce(con, expr, expected):\n result = con.execute(expr)\n \n if isinstance(result, decimal.Decimal):\n@@ -133,7 +121,7 @@ def test_coalesce(backend, con, expr, expected):\n \n # TODO(dask) - identicalTo - #2553\n @pytest.mark.notimpl([\"clickhouse\", 
\"datafusion\", \"dask\", \"pyspark\"])\n-def test_identical_to(backend, alltypes, con, sorted_df):\n+def test_identical_to(backend, alltypes, sorted_df):\n sorted_alltypes = alltypes.sort_by('id')\n df = sorted_df\n dt = df[['tinyint_col', 'double_col']]\n@@ -205,18 +193,7 @@ def test_notin(backend, alltypes, sorted_df, column, elements):\n lambda t: t['bool_col'],\n lambda df: df['bool_col'],\n id=\"no_op\",\n- marks=pytest.mark.xfail(\n- (\n- datafusion is not None\n- and (\n- # older versions of datafusion don't have a\n- # `__version__` attribute\n- not hasattr(datafusion, \"__version__\")\n- or vparse(datafusion.__version__) < vparse(\"0.5.0\")\n- )\n- ),\n- reason=\"broken on datafusion < 0.5.0\",\n- ),\n+ marks=pytest.mark.min_version(datafusion=\"0.5.0\"),\n ),\n param(\n lambda t: ~t['bool_col'], lambda df: ~df['bool_col'], id=\"negate\"\n@@ -306,10 +283,7 @@ def test_case_where(backend, alltypes, df):\n \n # TODO: some of these are notimpl (datafusion) others are probably never\n @pytest.mark.notimpl([\"datafusion\", \"mysql\", \"sqlite\"])\[email protected](\n- duckdb is not None and vparse(duckdb.__version__) < vparse(\"0.3.3\"),\n- reason=\"<0.3.3 does not support isnan/isinf properly\",\n-)\[email protected]_version(duckdb=\"0.3.3\", reason=\"isnan/isinf unsupported\")\n def test_select_filter_mutate(backend, alltypes, df):\n \"\"\"Test that select, filter and mutate are executed in right order.\n \n@@ -587,25 +561,12 @@ def test_zeroifnull_literals(con, dtype, zero, expected):\n )\n \n \n-DASK_WITH_FIXED_REPLACE = vparse(\"2022.01.1\")\n-\n-\n-def skip_if_dask_replace_is_broken(backend):\n- if (name := backend.name()) != \"dask\":\n- return\n- if (\n- version := vparse(importlib.import_module(name).__version__)\n- ) < DASK_WITH_FIXED_REPLACE:\n- pytest.skip(\n- f\"{name}@{version} doesn't support this operation with later \"\n- \"versions of pandas\"\n- )\n-\n-\n @pytest.mark.notimpl([\"datafusion\"])\[email protected]_version(\n+ dask=\"2022.01.1\",\n+ reason=\"unsupported operation with later versions of pandas\",\n+)\n def test_zeroifnull_column(backend, alltypes, df):\n- skip_if_dask_replace_is_broken(backend)\n-\n expr = alltypes.int_col.nullif(1).zeroifnull()\n result = expr.execute().astype(\"int32\")\n expected = df.int_col.replace(1, 0).rename(\"tmp\").astype(\"int32\")\n", "test_join.py": "@@ -163,9 +163,6 @@ def test_filtering_join(backend, batting, awards_players, how):\n backend.assert_frame_equal(result, expected, check_like=True)\n \n \[email protected]_backends(\n- [\"dask\", \"pandas\"], reason=\"insane memory explosion\"\n-)\n @pytest.mark.notyet(\n [\"pyspark\"],\n reason=\"pyspark doesn't support joining on differing column names\",\n", "test_timecontext.py": "@@ -55,7 +55,7 @@ def ctx_col():\n \n \n @pytest.mark.notimpl([\"dask\", \"duckdb\"])\[email protected]_spark_version('3.1')\[email protected]_version(pyspark=\"3.1\")\n @pytest.mark.parametrize(\n 'window',\n [\n", "test_vectorized_udf.py": "@@ -496,7 +496,7 @@ def test_elementwise_udf_overwrite_destruct_and_assign(\n udf_backend.assert_frame_equal(result, expected, check_like=True)\n \n \[email protected]_spark_version('3.1')\[email protected]_version(pyspark=\"3.1\")\n def test_elementwise_udf_destruct_exact_once(udf_backend, udf_alltypes):\n with tempfile.TemporaryDirectory() as tempdir:\n \n", "poetry.lock": "@@ -802,10 +802,10 @@ optional = false\n python-versions = \">=3.6.1,<4.0\"\n \n [package.extras]\n-plugins = [\"setuptools\"]\n+pipfile_deprecated_finder = [\"pipreqs\", 
\"requirementslib\"]\n+requirements_deprecated_finder = [\"pipreqs\", \"pip-api\"]\n colors = [\"colorama (>=0.4.3,<0.5.0)\"]\n-requirements_deprecated_finder = [\"pip-api\", \"pipreqs\"]\n-pipfile_deprecated_finder = [\"requirementslib\", \"pipreqs\"]\n+plugins = [\"setuptools\"]\n \n [[package]]\n name = \"jedi\"\n@@ -1801,9 +1801,9 @@ py-cpuinfo = \"*\"\n pytest = \">=3.8\"\n \n [package.extras]\n-aspect = [\"aspectlib\"]\n+histogram = [\"pygaljs\", \"pygal\"]\n elasticsearch = [\"elasticsearch\"]\n-histogram = [\"pygal\", \"pygaljs\"]\n+aspect = [\"aspectlib\"]\n \n [[package]]\n name = \"pytest-clarity\"\n@@ -2077,9 +2077,9 @@ optional = true\n python-versions = \">=3.6\"\n \n [package.extras]\n-all = [\"pytest\", \"pytest-cov\", \"numpy\"]\n-test = [\"pytest\", \"pytest-cov\"]\n vectorized = [\"numpy\"]\n+test = [\"pytest-cov\", \"pytest\"]\n+all = [\"numpy\", \"pytest-cov\", \"pytest\"]\n \n [[package]]\n name = \"six\"\n", "pyproject.toml": "@@ -230,8 +230,7 @@ markers = [\n \"core: tests that do not required a backend\",\n \"geospatial: tests for geospatial functionality\",\n \"hdfs: Hadoop file system tests\",\n- \"min_spark_version: backends tests that require a specific version of pyspark to pass\",\n- \"skip_backends: skip tests on the provided backends\",\n+ \"min_version: backends tests that require a specific version of a dependency to pass\",\n \"notimpl: functionality that isn't implemented in ibis\",\n \"notyet: for functionality that isn't implemented in a backend\",\n \"never: tests for functionality that a backend is likely to never implement\",\n"}
chore: raise coverage
59360860e9905be8af2f1c7c6e98cfdadbe320eb
chore
https://github.com/mikro-orm/mikro-orm/commit/59360860e9905be8af2f1c7c6e98cfdadbe320eb
raise coverage
{"EntityTransformer.ts": "@@ -79,12 +79,18 @@ export class EntityTransformer {\n const property = wrapped.__meta.properties[prop];\n const platform = wrapped.__internal.platform;\n \n- if (property?.serializer) {\n- return property.serializer(entity[prop]);\n+ /* istanbul ignore next */\n+ const serializer = property?.serializer;\n+\n+ if (serializer) {\n+ return serializer(entity[prop]);\n }\n \n- if (property?.customType) {\n- return property.customType.toJSON(entity[prop], platform);\n+ /* istanbul ignore next */\n+ const customType = property?.customType;\n+\n+ if (customType) {\n+ return customType.toJSON(entity[prop], platform);\n }\n \n if (entity[prop] as unknown instanceof ArrayCollection) {\n"}
docs(schema): create Additional Concepts page - Move Drilldowns guide to Additional Concepts page - Move Subquery guide to Additional Concepts page - Move String Time Dimensions guide to Additional Concepts page - Update URLs across documentation - Add redirects from old pages to Additional Concepts page
b59c6712b1fb95839ef126901bf47b3b372deaa6
docs
https://github.com/wzhiqing/cube/commit/b59c6712b1fb95839ef126901bf47b3b372deaa6
create Additional Concepts page - Move Drilldowns guide to Additional Concepts page - Move Subquery guide to Additional Concepts page - Move String Time Dimensions guide to Additional Concepts page - Update URLs across documentation - Add redirects from old pages to Additional Concepts page
{"Examples.md": "@@ -44,15 +44,15 @@ The following tutorials cover advanced concepts of Cube.js:\n \n Learn more about prominent features of Cube.js:\n \n-| Feature | Story | Demo |\n-| :------------------------------------------------------------------------------ | :------------------------------------------------------------------------------------------------------------------- | :------------------------------------------------ |\n-| [Drill downs](https://cube.dev/docs/drill-downs) | [Introducing a drill down table API](https://cube.dev/blog/introducing-a-drill-down-table-api-in-cubejs/) | [Demo](https://drill-downs-demo.cube.dev) |\n-| [Compare date range](https://cube.dev/docs/query-format#time-dimensions-format) | [Comparing data over different time periods](https://cube.dev/blog/comparing-data-over-different-time-periods/) | [Demo](https://compare-date-range-demo.cube.dev) |\n-| [Data blending](https://cube.dev/docs/recipes/data-blending) | [Introducing data blending API](https://cube.dev/blog/introducing-data-blending-api/) | [Demo](https://data-blending-demo.cube.dev) |\n-| [Real-time data fetch](https://cube.dev/docs/real-time-data-fetch) | [Real-time dashboard guide](https://real-time-dashboard.cube.dev) | [Demo](https://real-time-dashboard-demo.cube.dev) |\n-| [Dynamic schema creation](https://cube.dev/docs/dynamic-schema-creation) | [Using asyncModule to generate schemas](https://github.com/cube-js/cube.js/tree/master/examples/async-module-simple) | \u2014 |\n-| [Authentication](https://cube.dev/docs/security#using-json-web-key-sets-jwks) | [Auth0 integration](https://github.com/cube-js/cube.js/tree/master/examples/auth0) | \u2014 |\n-| [Authentication](https://cube.dev/docs/security#using-json-web-key-sets-jwks) | [AWS Cognito integration](https://github.com/cube-js/cube.js/tree/master/examples/cognito) | \u2014 |\n+| Feature | Story | Demo |\n+| :-------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------- | :------------------------------------------------ |\n+| [Drill downs](https://cube.dev/docs/schema/fundamentals/additional-concepts#drilldowns) | [Introducing a drill down table API](https://cube.dev/blog/introducing-a-drill-down-table-api-in-cubejs/) | [Demo](https://drill-downs-demo.cube.dev) |\n+| [Compare date range](https://cube.dev/docs/query-format#time-dimensions-format) | [Comparing data over different time periods](https://cube.dev/blog/comparing-data-over-different-time-periods/) | [Demo](https://compare-date-range-demo.cube.dev) |\n+| [Data blending](https://cube.dev/docs/recipes/data-blending) | [Introducing data blending API](https://cube.dev/blog/introducing-data-blending-api/) | [Demo](https://data-blending-demo.cube.dev) |\n+| [Real-time data fetch](https://cube.dev/docs/real-time-data-fetch) | [Real-time dashboard guide](https://real-time-dashboard.cube.dev) | [Demo](https://real-time-dashboard-demo.cube.dev) |\n+| [Dynamic schema creation](https://cube.dev/docs/dynamic-schema-creation) | [Using asyncModule to generate schemas](https://github.com/cube-js/cube.js/tree/master/examples/async-module-simple) | \u2014 |\n+| [Authentication](https://cube.dev/docs/security#using-json-web-key-sets-jwks) | [Auth0 integration](https://github.com/cube-js/cube.js/tree/master/examples/auth0) | \u2014 |\n+| [Authentication](https://cube.dev/docs/security#using-json-web-key-sets-jwks) | [AWS Cognito 
integration](https://github.com/cube-js/cube.js/tree/master/examples/cognito) | \u2014 |\n \n ### Front-end integrations\n \n", "event-analytics.md": "@@ -247,7 +247,8 @@ joins: {\n ```\n \n To determine the end of the session, we\u2019re going to use the\n-[subQuery feature](/subquery) in Cube.js.\n+[`subQuery` feature](/schema/fundamentals/additional-concepts#subquery) in\n+Cube.js.\n \n ```javascript\n // Add the lastEventTimestamp measure to the measures block in the Events cube\n", "Drill-Downs.md": "@@ -1,66 +0,0 @@\n----\n-title: Drill Downs\n-permalink: /drill-downs\n-category: Guides\n-subCategory: Tutorials\n-menuOrder: 27\n----\n-\n-Drill Down is a powerful feature to facilitate data exploration. It allows to\n-build an interface to let users dive deeper into visualizations and data tables.\n-See [ResultSet.drillDown()](@cubejs-client-core#result-set-drill-down) on how to\n-use this feature on the client side.\n-\n-You can follow\n-[this tutorial](https://cube.dev/blog/introducing-a-drill-down-table-api-in-cubejs/)\n-to learn more about building drill downs UI.\n-\n-## Defining a Drill Down in Schema\n-\n-A drill down is defined on the [measure](/schema/reference/measures) level in\n-your data schema. It\u2019s defined as a list of dimensions called **drill members**.\n-Once defined, these drill members will always be used to show underlying data\n-when drilling into that measure.\n-\n-Let\u2019s consider the following example of our imaginary e-commerce store. We have\n-an Orders cube, which describes orders in our store. It\u2019s connected to Users and\n-Products.\n-\n-```javascript\n-cube(`Orders`, {\n- sql: `select * from orders`,\n-\n- joins: {\n- Users: {\n- type: `belongsTo`,\n- sql: `${Orders}.user_id = ${Users}.id`,\n- },\n-\n- Products: {\n- type: `belongsTo`,\n- sql: `${Orders}.product_id = ${Products}.id`,\n- },\n- },\n-\n- measures: {\n- count: {\n- type: `count`,\n- drillMembers: [id, status, Products.name, Users.email],\n- },\n- },\n-\n- dimensions: {\n- id: {\n- type: `number`,\n- sql: `id`,\n- primaryKey: true,\n- shown: true,\n- },\n-\n- status: {\n- type: `string`,\n- sql: `status`,\n- },\n- },\n-});\n-```\n", "String-Time-Dimensions.md": "@@ -1,35 +0,0 @@\n----\n-title: String Time Dimensions\n-permalink: /working-with-string-time-dimensions\n-scope: cubejs\n-category: Guides\n-subCategory: Tutorials\n-menuOrder: 19\n----\n-Cube.js always expects `timestamp with timezone` or compatible type as an input to time dimension.\n-There're a lot of cases when in your underlying fact table datetime information is stored as a string.\n-Hopefully most of SQL backends support datetime parsing which allows you to convert strings into timestamps.\n-\n-Let's consider an example for BigQuery:\n-\n-```javascript\n-cube(`Events`, {\n- sql: `SELECT * FROM schema.events`,\n-\n- // ...\n-\n- dimensions: {\n- date: {\n- sql: `PARSE_TIMESTAMP('%Y-%m-%d', date)`,\n- type: `time`\n- }\n- }\n-});\n-```\n-\n-In this particular cube `date` column will be parsed using `%Y-%m-%d` format.\n-Please note that as we do not pass timezone parameter to `PARSE_TIMESTAMP` it'll set default `UTC` timezone.\n-You should always set timezone appropriately for parsed timestamps as Cube.js always does timezone conversions according to user settings.\n-\n-Although query performance of big data backends like BigQuery or Presto won't likely suffer from date parsing, performance of RDBMS backends like Postgres most likely will.\n-Adding timestamp columns with indexes should be considered in this case.\n", 
"Subquery.md": "@@ -1,154 +0,0 @@\n----\n-title: Subquery\n-permalink: /subquery\n-scope: cubejs\n-category: Guides\n-subCategory: Tutorials\n-menuOrder: 16\n----\n-\n-[comment]: # (PROOFREAD: DONE)\n-\n-You can use subquery dimensions to **reference measures from other cubes inside a dimension**. Under the hood, it behaves [like the correlated subquery](https://en.wikipedia.org/wiki/Correlated_subquery), but is implemented via joins for performance optimization and portability.\n-\n-<!-- prettier-ignore-start -->\n-[[warning | Warning]]\n-| You cannot use subquery dimensions to reference measures from the same cube.\n-<!-- prettier-ignore-end -->\n-\n-Consider the following data schema, where we have `Deals` and `Sales Managers`. `Deals` belong to `Sales Managers` and have the `amount` dimension. What we want is to calculate the amount of deals for `Sales Managers`.\n-\n-![subquery-1.png](https://raw.githubusercontent.com/cube-js/cube.js/master/docs/content/Guides/subquery-1.png)\n-\n-\n-To calculate the deals amount for sales managers in pure SQL, we can use the correlated subquery, which will look like this:\n-\n-```sql\n-SELECT\n- id,\n- (SELECT sum(amount) FROM deals WHERE deals.sales_manager_id = sales_managers.id) as deals_amount\n-FROM sales_managers\n-GROUPD BY 1\n-```\n-\n-Cube.js makes subqueries easy and efficient. Subqueries are defined as regular dimensions with the parameter `subQuery` set to true.\n-\n-```javascript\n-cube(`Deals`, {\n- sql: `select * from deals`,\n-\n- measures: {\n- amount: {\n- sql: `amount`,\n- type: `sum`\n- }\n- }\n-});\n-\n-cube(`SalesManagers`, {\n- sql: `select * from sales_managers`,\n-\n- joins: {\n- Deals: {\n- relationship: `hasMany`,\n- sql: `${SalesManagers}.id = ${Deals}.sales_manager_id`\n- }\n- },\n-\n- measures: {\n- averageDealAmount: {\n- sql: `${dealsAmount}`,\n- type: `avg`\n- }\n- },\n-\n- dimensions: {\n- id: {\n- sql: `id`,\n- type: `string`,\n- primaryKey: true\n- },\n-\n- dealsAmount: {\n- sql: `${Deals.amount}`,\n- type: `number`,\n- subQuery: true\n- }\n- }\n-});\n-```\n-\n-A subquery requires referencing at least one measure in the definition.\n-Generally speaking all measures involved in defining particular subquery dimension should be defined as measures first and then referenced from a subquery dimension.\n-For example the following schema **will not work**:\n-\n-```javascript\n-cube(`Deals`, {\n- sql: `select * from deals`,\n-\n- measures: {\n- amount: {\n- sql: `amount`,\n- type: `sum`\n- }\n- }\n-});\n-\n-cube(`SalesManagers`, {\n- // ...\n- dimensions: {\n- // ...\n- dealsAmount: {\n- sql: `sum(${Deals}.amount)`, // !!! Doesn't work!\n- type: `number`,\n- subQuery: true\n- }\n- }\n-});\n-```\n-\n-You can **reference subquery dimensions in measures as usual dimensions**. 
The example below shows the definition of an average deal amount per sales manager:\n-\n-```javascript\n-cube(`SalesManagers`, {\n- measures: {\n- averageDealsAmount: {\n- sql: `${dealsAmount}`,\n- type: `avg`\n- }\n- },\n-\n- dimensions: {\n- id: {\n- sql: `id`,\n- type: `string`,\n- primaryKey: true\n- }\n- }\n-});\n-```\n-\n-## Under the hood\n-\n-Based on the subquery dimension definition, Cube.js will create a query that will include the primary key dimension of the main cube and all measures and dimensions included in the SQL definition of the subquery dimension.\n-\n-This query will be joined as a left join to the main SQL query.\n-For example, when using the `SalesManagers.dealsAmount` subquery dimension, the following query will be generated:\n-\n-```javascript\n-{\n- measures: ['SalesManagers.dealsAmount'],\n- dimensions: ['SalesManagers.id']\n-}\n-```\n-\n-In case of `{ measures: ['SalesManagers.averageDealAmount'] }` query following SQL will be generated:\n-\n-```javascript\n-SELECT avg(sales_managers__average_deal_amount) FROM sales_managers\n-LEFT JOIN (\n- SELECT sales_managers.id sales_managers__id, sum(deals.amount) sales_managers__average_deal_amount FROM sales_managers\n- LEFT JOIN deals ON sales_managers.id = deals.sales_manager_id\n- GROUP BY 1\n-) sales_managers__average_deal_amount_subquery ON sales_managers__average_deal_amount_subquery.sales_managers__id = sales_managers.id\n-```\n", "Code-Reusability-Export-and-Import.md": "@@ -92,4 +92,5 @@ cube(`Events`, {\n https://developer.mozilla.org/en-US/docs/web/javascript/reference/statements/export\n [mdn-js-es6-import]:\n https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import\n-[ref-schema-string-time-dims]: /working-with-string-time-dimensions\n+[ref-schema-string-time-dims]:\n+ /schema/fundamentals/additional-concepts#string-time-dimensions\n", "Additional-Concepts.md": "@@ -0,0 +1,298 @@\n+---\n+title: Additional Concepts\n+permalink: /schema/fundamentals/additional-concepts\n+category: Data Schema\n+subCategory: Fundamentals\n+menuOrder: 3\n+redirect_from:\n+ - /drill-downs\n+ - /subquery\n+ - /working-with-string-time-dimensions\n+---\n+\n+## Drilldowns\n+\n+Drilldowns are a powerful feature to facilitate data exploration. It allows\n+building an interface to let users dive deeper into visualizations and data\n+tables. See [`ResultSet.drillDown()`][ref-cubejs-client-ref-resultset-drilldown]\n+on how to use this feature on the client side.\n+\n+A drilldown is defined on the [measure][ref-schema-ref-measures] level in your\n+data schema. It\u2019s defined as a list of dimensions called **drill members**. Once\n+defined, these drill members will always be used to show underlying data when\n+drilling into that measure.\n+\n+Let\u2019s consider the following example of our imaginary e-commerce store. We have\n+an Orders cube, which describes orders in our store. 
It\u2019s connected to Users and\n+Products.\n+\n+```javascript\n+cube(`Orders`, {\n+ sql: `select * from orders`,\n+\n+ joins: {\n+ Users: {\n+ type: `belongsTo`,\n+ sql: `${Orders}.user_id = ${Users}.id`,\n+ },\n+\n+ Products: {\n+ type: `belongsTo`,\n+ sql: `${Orders}.product_id = ${Products}.id`,\n+ },\n+ },\n+\n+ measures: {\n+ count: {\n+ type: `count`,\n+ // Here we define all possible properties we might want\n+ // to \"drill down\" on from our front-end\n+ drillMembers: [id, status, Products.name, Users.email],\n+ },\n+ },\n+\n+ dimensions: {\n+ id: {\n+ type: `number`,\n+ sql: `id`,\n+ primaryKey: true,\n+ shown: true,\n+ },\n+\n+ status: {\n+ type: `string`,\n+ sql: `status`,\n+ },\n+ },\n+});\n+```\n+\n+You can follow [this tutorial][blog-drilldown-api] to learn more about building\n+a UI for drilldowns.\n+\n+## Subquery\n+\n+You can use subquery dimensions to reference [measures][ref-schema-ref-measures]\n+from other cubes inside a [dimension][ref-schema-ref-dimensions]. Under the\n+hood, it behaves [as a correlated subquery][wiki-correlated-subquery], but is\n+implemented via joins for optimal performance and portability.\n+\n+<!-- prettier-ignore-start -->\n+[[warning |]]\n+| You cannot use subquery dimensions to reference measures from the same cube.\n+<!-- prettier-ignore-end -->\n+\n+Consider the following tables, where we have `deals` and `sales_managers`.\n+`deals` belong to `sales_managers` and have the `amount` dimension. What we want\n+is to calculate the amount of deals for `sales_managers`:\n+\n+<div\n+ style=\"text-align: center\"\n+>\n+ <img\n+ alt=\"Subquery Example with Deals and SalesManager cubes\"\n+ src=\"https://raw.githubusercontent.com/cube-js/cube.js/master/docs/content/Schema/Fundamentals/subquery.png\"\n+ style=\"border: none\"\n+ width=\"100%\"\n+ />\n+</div>\n+\n+To calculate the deals amount for sales managers in pure SQL, we can use a\n+correlated subquery, which will look like this:\n+\n+```sql\n+SELECT\n+ id,\n+ (SELECT SUM(amount) FROM deals WHERE deals.sales_manager_id = sales_managers.id) AS deals_amount\n+FROM sales_managers\n+GROUP BY 1\n+```\n+\n+Cube.js makes subqueries easy and efficient. Subqueries are defined as regular\n+dimensions with the parameter `subQuery` set to true.\n+\n+```javascript\n+cube(`Deals`, {\n+ sql: `SELECT * FROM deals`,\n+\n+ measures: {\n+ amount: {\n+ sql: `amount`,\n+ type: `sum`,\n+ },\n+ },\n+});\n+\n+cube(`SalesManagers`, {\n+ sql: `SELECT * FROM sales_managers`,\n+\n+ joins: {\n+ Deals: {\n+ relationship: `hasMany`,\n+ sql: `${SalesManagers}.id = ${Deals}.sales_manager_id`,\n+ },\n+ },\n+\n+ measures: {\n+ averageDealAmount: {\n+ sql: `${dealsAmount}`,\n+ type: `avg`,\n+ },\n+ },\n+\n+ dimensions: {\n+ id: {\n+ sql: `id`,\n+ type: `string`,\n+ primaryKey: true,\n+ },\n+\n+ dealsAmount: {\n+ sql: `${Deals.amount}`,\n+ type: `number`,\n+ subQuery: true,\n+ },\n+ },\n+});\n+```\n+\n+A subquery requires referencing at least one [measure][ref-schema-ref-measures]\n+in its definition. Generally speaking, all the columns used to define a subquery\n+dimension should first be defined as [measures][ref-schema-ref-measures] on\n+their respective cubes and then referenced from a subquery dimension over a\n+[join][ref-schema-ref-joins]. 
For example the following schema will **not**\n+work:\n+\n+```javascript\n+cube(`Deals`, {\n+ sql: `select * from deals`,\n+\n+ measures: {\n+ count: {\n+ type: `count`,\n+ },\n+ },\n+});\n+\n+cube(`SalesManagers`, {\n+ // ...\n+ dimensions: {\n+ // ...\n+ dealsAmount: {\n+ sql: `SUM(${Deals}.amount)`, // Doesn't work, because `amount` is not a measure on `Deals`\n+ type: `number`,\n+ subQuery: true,\n+ },\n+ },\n+});\n+```\n+\n+You can reference subquery dimensions in measures as usual\n+[dimensions][ref-schema-ref-dimensions]. The example below shows the definition\n+of an average deal amount per sales manager:\n+\n+```javascript\n+cube(`SalesManagers`, {\n+ measures: {\n+ averageDealsAmount: {\n+ sql: `${dealsAmount}`,\n+ type: `avg`,\n+ },\n+ },\n+\n+ dimensions: {\n+ id: {\n+ sql: `id`,\n+ type: `string`,\n+ primaryKey: true,\n+ },\n+ dealsAmount: {\n+ sql: `${Deals.amount}`,\n+ type: `number`,\n+ subQuery: true,\n+ },\n+ },\n+});\n+```\n+\n+### Under the hood\n+\n+Based on the subquery dimension definition, Cube.js will create a query that\n+will include the primary key dimension of the main cube and all\n+[measures][ref-schema-ref-measures] and [dimensions][ref-schema-ref-dimensions]\n+included in the SQL definition of the subquery dimension.\n+\n+This query will be joined as a `LEFT JOIN` onto the main SQL query. For example,\n+when using the `SalesManagers.dealsAmount` subquery dimension, the following\n+query will be generated:\n+\n+```json\n+{\n+ \"measures\": [\"SalesManagers.dealsAmount\"],\n+ \"dimensions\": [\"SalesManagers.id\"]\n+}\n+```\n+\n+If a query includes the `SalesManagers.averageDealAmount` measure, the following\n+SQL will be generated:\n+\n+```sql\n+SELECT\n+ AVG(sales_managers__average_deal_amount)\n+FROM sales_managers\n+LEFT JOIN (\n+ SELECT\n+ sales_managers.id sales_managers__id,\n+ SUM(deals.amount) sales_managers__average_deal_amount\n+ FROM sales_managers\n+ LEFT JOIN deals\n+ ON sales_managers.id = deals.sales_manager_id\n+ GROUP BY 1\n+) sales_managers__average_deal_amount_subquery\n+ ON sales_managers__average_deal_amount_subquery.sales_managers__id = sales_managers.id\n+```\n+\n+## String Time Dimensions\n+\n+Cube.js always expects a timestamp with timezone (or compatible type) as an\n+input to the time dimension. However, there are a lot of cases when the\n+underlying table's datetime information is stored as a string. Most SQL\n+databases support datetime parsing which allows converting strings to\n+timestamps. Let's consider an example cube for BigQuery:\n+\n+```javascript\n+cube(`Events`, {\n+ sql: `SELECT * FROM schema.events`,\n+\n+ // ...\n+\n+ dimensions: {\n+ date: {\n+ sql: `PARSE_TIMESTAMP('%Y-%m-%d', date)`,\n+ type: `time`,\n+ },\n+ },\n+});\n+```\n+\n+In this particular cube, the `date` column will be parsed using the `%Y-%m-%d`\n+format. Please note that as we do not pass timezone parameter to\n+[`PARSE_TIMESTAMP`][bq-parse-timestamp], it will set `UTC` as the timezone by\n+default. You should always set timezone appropriately for parsed timestamps as\n+Cube.js always does timezone conversions according to user settings.\n+\n+Although query performance of big data backends like BigQuery or Presto won't\n+likely suffer from date parsing, performance of RDBMS backends like Postgres\n+most likely will. 
Adding timestamp columns with indexes should strongly be\n+considered in this case.\n+\n+[blog-drilldown-api]:\n+ https://cube.dev/blog/introducing-a-drill-down-table-api-in-cubejs/\n+[bq-parse-timestamp]:\n+ https://cloud.google.com/bigquery/docs/reference/standard-sql/timestamp_functions#parse_timestamp\n+[ref-cubejs-client-ref-resultset-drilldown]:\n+ /@cubejs-client-core#result-set-drill-down\n+[ref-schema-ref-dimensions]: /schema/reference/dimensions\n+[ref-schema-ref-joins]: /schema/reference/joins\n+[ref-schema-ref-measures]: /schema/reference/measures\n+[wiki-correlated-subquery]: https://en.wikipedia.org/wiki/Correlated_subquery\n", "subquery.png": "", "dimensions.md": "@@ -237,5 +237,5 @@ cube('Products', {\n ```\n \n [ref-schema-ref-joins]: /schema/reference/joins\n-[ref-subquery]: /subquery\n+[ref-subquery]: /schema/fundamentals/additional-concepts#subquery\n [self-subquery]: #parameters-sub-query\n", "measures.md": "@@ -278,4 +278,4 @@ join will be created automatically.\n /schema/reference/types-and-formats#measures-types\n [ref-schema-ref-types-formats-measures-formats]:\n /schema/reference/types-and-formats#measures-formats\n-[ref-drilldowns]: /drill-downs\n+[ref-drilldowns]: /schema/fundamentals/additional-concepts#drilldowns\n", "types-and-formats.md": "@@ -455,8 +455,9 @@ cube('Orders', {\n });\n ```\n \n-[ref-string-time-dims]: /working-with-string-time-dimensions\n+[ref-string-time-dims]:\n+ /schema/fundamentals/additional-concepts#string-time-dimensions\n [ref-schema-ref-preaggs-rollup]:\n /schema/reference/pre-aggregations#parameters-type-rollup\n [ref-schema-ref-calc-measures]: /schema/reference/measures#calculated-measures\n-[ref-drilldowns]: /drill-downs\n+[ref-drilldowns]: /schema/fundamentals/additional-concepts#drilldowns\n"}
test: fix e2e tests for zero concurrency
663595da6be2e3eda6e33e8b7beb3c4a11c87fc6
test
https://github.com/Hardeepex/crawlee/commit/663595da6be2e3eda6e33e8b7beb3c4a11c87fc6
fix e2e tests for zero concurrency
{"main.js": "@@ -1,5 +1,5 @@\n+import { CheerioCrawler, log, RequestQueueV1 } from '@crawlee/cheerio';\n import { Actor } from 'apify';\n-import { CheerioCrawler, log, RequestQueue } from '@crawlee/cheerio';\n \n log.setLevel(log.LEVELS.DEBUG);\n \n@@ -12,7 +12,7 @@ const mainOptions = {\n \n // RequestQueue auto-reset when stuck with requests in progress\n await Actor.main(async () => {\n- const requestQueue = await RequestQueue.open();\n+ const requestQueue = await RequestQueueV1.open();\n await requestQueue.addRequest({ url: 'https://example.com/?q=1' });\n await requestQueue.addRequest({ url: 'https://example.com/?q=2' });\n const r3 = await requestQueue.addRequest({ url: 'https://example.com/?q=3' });\n"}
refactor: fork EM in the seeder manager so we dont use global context
022a1cc45de077969c9adc659b8bb7ed896e3bc1
refactor
https://github.com/mikro-orm/mikro-orm/commit/022a1cc45de077969c9adc659b8bb7ed896e3bc1
fork EM in the seeder manager so we dont use global context
{"seed-manager.ts": "@@ -10,6 +10,7 @@ export class SeedManager implements ISeedManager {\n private readonly absolutePath = Utils.absolutePath(this.options.path, this.config.get('baseDir'));\n \n constructor(private readonly em: EntityManager) {\n+ this.em = this.em.fork();\n }\n \n async seed(...seederClasses: { new(): Seeder }[]): Promise<void> {\n"}
chore: refactor one test to use `expr` instead of `as any`
2543e5a9aa58e8e5e5f8e16d4e0d574d45f73e0e
chore
https://github.com/mikro-orm/mikro-orm/commit/2543e5a9aa58e8e5e5f8e16d4e0d574d45f73e0e
refactor one test to use `expr` instead of `as any`
{"EntityManager.mysql.test.ts": "@@ -7,7 +7,7 @@ import chalk from 'chalk';\n import {\n Collection, Configuration, EntityManager, LockMode, MikroORM, QueryFlag, QueryOrder, Reference, Logger, ValidationError, wrap,\n UniqueConstraintViolationException, TableNotFoundException, TableExistsException, SyntaxErrorException,\n- NonUniqueFieldNameException, InvalidFieldNameException,\n+ NonUniqueFieldNameException, InvalidFieldNameException, expr,\n } from '@mikro-orm/core';\n import { MySqlDriver, MySqlConnection } from '@mikro-orm/mysql';\n import { Author2, Book2, BookTag2, FooBar2, FooBaz2, Publisher2, PublisherType, Test2 } from './entities-sql';\n@@ -739,7 +739,7 @@ describe('EntityManagerMySql', () => {\n expect(res2.created_at).toBeDefined();\n expect(res2.meta).toEqual({ category: 'foo', items: 1 });\n \n- const res3 = (await orm.em.findOne(Book2, { 'JSON_CONTAINS(meta, ?)': [{ items: 1 }, true] } as any))!;\n+ const res3 = await orm.em.findOneOrFail(Book2, { [expr('JSON_CONTAINS(meta, ?)')]: [{ items: 1 }, true] });\n expect(res3).toBeInstanceOf(Book2);\n expect(res3.createdAt).toBeDefined();\n expect(res3.meta).toEqual({ category: 'foo', items: 1 });\n"}
refactor: use `Utils.requireFrom()` to load entities
98e402871d27850d5bb7c763ea9595a2e35a4202
refactor
https://github.com/mikro-orm/mikro-orm/commit/98e402871d27850d5bb7c763ea9595a2e35a4202
use `Utils.requireFrom()` to load entities
{"MikroORM.ts": "@@ -29,7 +29,7 @@ export class MikroORM<D extends IDatabaseDriver = IDatabaseDriver> {\n \n const orm = new MikroORM<D>(options!);\n const discovery = new MetadataDiscovery(MetadataStorage.init(), orm.driver.getPlatform(), orm.config);\n- orm.metadata = await discovery.discover();\n+ orm.metadata = await discovery.discover(orm.config.get('tsNode'));\n orm.driver.setMetadata(orm.metadata);\n orm.em = orm.driver.createEntityManager<D>();\n orm.metadata.decorate(orm.em);\n", "MetadataDiscovery.ts": "@@ -695,8 +695,7 @@ export class MetadataDiscovery {\n \n private initEnumValues(prop: EntityProperty, path: string): void {\n path = Utils.normalizePath(this.config.get('baseDir'), path);\n- // eslint-disable-next-line @typescript-eslint/no-var-requires\n- const exports = require(path);\n+ const exports = Utils.requireFrom(path, process.cwd());\n const target = exports[prop.type] || exports.default;\n \n if (target) {\n@@ -746,8 +745,7 @@ export class MetadataDiscovery {\n }\n \n private getEntityClassOrSchema(path: string, name: string) {\n- // eslint-disable-next-line @typescript-eslint/no-var-requires\n- const exports = require(path);\n+ const exports = Utils.requireFrom(path, process.cwd());\n const target = exports.default || exports[name];\n const schema = Object.values(exports).find(item => item instanceof EntitySchema);\n \n", "ConfigurationLoader.ts": "@@ -15,9 +15,7 @@ export class ConfigurationLoader {\n path = Utils.normalizePath(path);\n \n if (await pathExists(path)) {\n- // eslint-disable-next-line @typescript-eslint/no-var-requires\n- const config = require(path);\n-\n+ const config = Utils.requireFrom(path, process.cwd());\n return new Configuration({ ...(config.default || config), ...options }, validate);\n }\n }\n"}
ci(dev-tools): add PR title and body linter to verify conventional commits and allow squash merges
09e80b976415059d7421edc34ba0122829d3361d
ci
https://github.com/rohankumardubey/ibis/commit/09e80b976415059d7421edc34ba0122829d3361d
add PR title and body linter to verify conventional commits and allow squash merges
{"ibis-docs-lint.yml": "@@ -21,25 +21,6 @@ permissions:\n contents: read\n \n jobs:\n- commitlint:\n- runs-on: ubuntu-latest\n- if: github.event_name == 'pull_request'\n- steps:\n- - name: checkout\n- uses: actions/checkout@v4\n- with:\n- fetch-depth: 0\n-\n- - name: install nix\n- uses: cachix/install-nix-action@v24\n- with:\n- nix_path: nixpkgs=channel:nixos-unstable-small\n- extra_nix_config: |\n- access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}\n-\n- - name: lint commits\n- run: nix run 'nixpkgs#commitlint' -- --from=${{ github.event.pull_request.base.sha }} --to=${{ github.sha }} --verbose\n-\n lint:\n runs-on: ubuntu-latest\n steps:\n", "pr-title.yml": "@@ -0,0 +1,53 @@\n+name: Conventional commits check\n+\n+on:\n+ # runs on `pull_request_target` events so that commenting on the PR is allowed\n+ pull_request_target:\n+ types: [opened, edited, synchronize, reopened]\n+\n+jobs:\n+ commitlint:\n+ name: Check PR title conforms to semantic-release\n+ runs-on: ubuntu-latest\n+ steps:\n+ - name: install node\n+ uses: actions/setup-node@v4\n+ with:\n+ node-version: \"20\"\n+\n+ - name: checkout code to pick up commitlint configuration\n+ uses: actions/checkout@v4\n+ with:\n+ ref: ${{ github.event.pull_request.head.sha }}\n+\n+ - name: install deps\n+ run: npm install \"@commitlint/config-conventional\"\n+\n+ - name: run commitlint\n+ run: npx commitlint --extends \"@commitlint/config-conventional\" --verbose <<< \"$COMMIT_MSG\"\n+ env:\n+ COMMIT_MSG: >\n+ ${{ github.event.pull_request.title }}\n+\n+ ${{ github.event.pull_request.body }}\n+\n+ - name: find existing comment\n+ if: failure()\n+ uses: peter-evans/find-comment@v2\n+ id: fc\n+ with:\n+ issue-number: ${{ github.event.pull_request.number }}\n+ body-regex: '\\*\\*ACTION NEEDED\\*\\*.+'\n+\n+ - name: post a message if the pull request title and body fail `commitlint`\n+ if: steps.fc.outputs.comment-body == ''\n+ uses: peter-evans/create-or-update-comment@v3\n+ with:\n+ body: |\n+ **ACTION NEEDED**\n+\n+ Ibis follows the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) for release automation.\n+\n+ The PR title and description are used as the merge commit message.\n+\n+ Please update your PR title and description to match the specification.\n"}
fix(migrations): do not interact with the database when snapshot exists
48df46219811e33c296ad3bd182a95702d3a2007
fix
https://github.com/mikro-orm/mikro-orm/commit/48df46219811e33c296ad3bd182a95702d3a2007
do not interact with the database when snapshot exists
{"SchemaGenerator.ts": "@@ -180,7 +180,10 @@ export class SchemaGenerator extends AbstractSchemaGenerator<AbstractSqlDriver>\n }\n \n async getUpdateSchemaMigrationSQL(options: { wrap?: boolean; safe?: boolean; dropTables?: boolean; fromSchema?: DatabaseSchema; schema?: string } = {}): Promise<{ up: string; down: string }> {\n- await this.ensureDatabase();\n+ if (!options.fromSchema) {\n+ await this.ensureDatabase();\n+ }\n+\n const { fromSchema, toSchema } = await this.prepareSchemaForComparison(options);\n const comparator = new SchemaComparator(this.platform);\n const diffUp = comparator.compare(fromSchema, toSchema);\n", "Migrator.ts": "@@ -241,9 +241,9 @@ export class Migrator implements IMigrator {\n };\n }\n \n- protected async getCurrentSchema(): Promise<DatabaseSchema> {\n+ protected async getSchemaFromSnapshot() {\n if (!this.options.snapshot || !await pathExists(this.snapshotPath)) {\n- return DatabaseSchema.create(this.driver.getConnection(), this.driver.getPlatform(), this.config);\n+ return undefined;\n }\n \n const data = await Utils.dynamicImport(this.snapshotPath);\n@@ -300,7 +300,7 @@ export class Migrator implements IMigrator {\n wrap: false,\n safe: this.options.safe,\n dropTables: this.options.dropTables,\n- fromSchema: await this.getCurrentSchema(),\n+ fromSchema: await this.getSchemaFromSnapshot(),\n });\n up.push(...diff.up.split('\\n'));\n down.push(...diff.down.split('\\n'));\n"}
refactor(ir): split out table count into `CountStar` operation
e812e6e6fb98832854aab90d3051aae8e207633e
refactor
https://github.com/ibis-project/ibis/commit/e812e6e6fb98832854aab90d3051aae8e207633e
split out table count into `CountStar` operation
{"registry.py": "@@ -206,7 +206,6 @@ operation_registry.update(\n ops.ExtractMinute: _extract('minute'),\n ops.ExtractSecond: _extract('second'),\n ops.ExtractMillisecond: _extract('millisecond'),\n- # reductions\n ops.TimestampNow: fixed_arity(sa.func.now, 0),\n # others\n ops.GroupConcat: _group_concat,\n", "main.py": "@@ -227,6 +227,15 @@ def sort_key(translator, op):\n return f\"{translator.translate(op.expr)}{sort_direction}\"\n \n \n+def count_star(translator, op):\n+ return aggregate._reduction_format(\n+ translator,\n+ \"count\",\n+ op.where,\n+ ops.Literal(value=1, dtype=dt.int64),\n+ )\n+\n+\n binary_infix_ops = {\n # Binary operations\n ops.Add: binary_infix.binary_infix_op('+'),\n@@ -309,6 +318,7 @@ operation_registry = {\n ops.Variance: aggregate.variance_like('var'),\n ops.GroupConcat: aggregate.reduction('group_concat'),\n ops.Count: aggregate.reduction('count'),\n+ ops.CountStar: count_star,\n ops.CountDistinct: aggregate.count_distinct,\n # string operations\n ops.StringConcat: concat,\n", "test_select.py": "@@ -194,7 +194,7 @@ def test_complex_array_expr_projection(db, alltypes):\n name = expr2.get_name()\n expected = f\"\"\"SELECT CAST(`string_col` AS Nullable(Float64)) AS `{name}`\n FROM (\n- SELECT `string_col`, count(*) AS `count`\n+ SELECT `string_col`, count() AS `count`\n FROM {db.name}.`functional_alltypes`\n GROUP BY `string_col`\n ) t0\"\"\"\n", "generic.py": "@@ -629,8 +629,8 @@ def execute_std_series_groupby_mask(op, data, mask, aggcontext=None, **kwargs):\n )\n \n \n-@execute_node.register(ops.Count, DataFrameGroupBy, type(None))\n-def execute_count_frame_groupby(op, data, _, **kwargs):\n+@execute_node.register(ops.CountStar, DataFrameGroupBy, type(None))\n+def execute_count_star_frame_groupby(op, data, _, **kwargs):\n result = data.size()\n # FIXME(phillipc): We should not hard code this column name\n result.name = 'count'\n@@ -725,11 +725,16 @@ def execute_notall_series(op, data, aggcontext=None, **kwargs):\n return result\n \n \n-@execute_node.register(ops.Count, pd.DataFrame, type(None))\n-def execute_count_frame(op, data, _, **kwargs):\n+@execute_node.register(ops.CountStar, pd.DataFrame, type(None))\n+def execute_count_star_frame(op, data, _, **kwargs):\n return len(data)\n \n \n+@execute_node.register(ops.CountStar, pd.DataFrame, pd.Series)\n+def execute_count_star_frame_filter(op, data, where, **kwargs):\n+ return len(data) - (len(where) - where.sum())\n+\n+\n @execute_node.register(ops.BitAnd, pd.Series, (pd.Series, type(None)))\n def execute_bit_and_series(_, data, mask, aggcontext=None, **kwargs):\n return aggcontext.agg(\n", "compiler.py": "@@ -502,7 +502,7 @@ def compile_any(t, op, *, aggcontext=None, **kwargs):\n \n \n @compiles(ops.NotAny)\n-def compile_notany(t, op, *, aggcontext=None, **kwargs):\n+def compile_notany(t, op, *args, aggcontext=None, **kwargs):\n # The code here is a little ugly because the translation are different\n # with different context.\n # When translating col.notany() (context is None), we returns the dataframe\n@@ -516,15 +516,15 @@ def compile_notany(t, op, *, aggcontext=None, **kwargs):\n return ~(F.max(col))\n \n return compile_aggregator(\n- t, op, fn=fn, aggcontext=aggcontext, **kwargs\n+ t, op, *args, fn=fn, aggcontext=aggcontext, **kwargs\n )\n else:\n- return ~compile_any(t, op, aggcontext=aggcontext, **kwargs)\n+ return ~compile_any(t, op, *args, aggcontext=aggcontext, **kwargs)\n \n \n @compiles(ops.All)\n-def compile_all(t, op, **kwargs):\n- return compile_aggregator(t, op, fn=F.min, **kwargs)\n+def 
compile_all(t, op, *args, **kwargs):\n+ return compile_aggregator(t, op, *args, fn=F.min, **kwargs)\n \n \n @compiles(ops.NotAll)\n@@ -542,17 +542,25 @@ def compile_notall(t, op, *, aggcontext=None, **kwargs):\n return ~compile_all(t, op, aggcontext=aggcontext, **kwargs)\n \n \n-def _count_star(_):\n- return F.count(F.lit(1))\n-\n-\n @compiles(ops.Count)\n def compile_count(t, op, **kwargs):\n- if _is_table(op):\n- fn = _count_star\n+ return compile_aggregator(t, op, fn=F.count, **kwargs)\n+\n+\n+@compiles(ops.CountStar)\n+def compile_count_star(t, op, aggcontext=None, **kwargs):\n+ src_table = t.translate(op.arg, **kwargs)\n+\n+ src_col = F.lit(1)\n+\n+ if (where := op.where) is not None:\n+ src_col = F.when(t.translate(where, **kwargs), src_col)\n+\n+ col = F.count(src_col)\n+ if aggcontext is not None:\n+ return col\n else:\n- fn = F.count\n- return compile_aggregator(t, op, fn=fn, **kwargs)\n+ return src_table.select(col)\n \n \n @compiles(ops.Max)\n@@ -1057,7 +1065,7 @@ def compile_string_like(t, op, **kwargs):\n return src_column.like(pattern)\n \n \n-@compiles(ops.NodeList)\n+@compiles(ops.ValueList)\n def compile_value_list(t, op, **kwargs):\n kwargs[\"raw\"] = False # override to force column literals\n return [t.translate(col, **kwargs) for col in op.values]\n", "test_bucket_histogram.py": "@@ -209,7 +209,7 @@ FROM (\n WHEN (10 <= `f`) AND (`f` < 25) THEN 2\n WHEN (25 <= `f`) AND (`f` <= 50) THEN 3\n ELSE CAST(NULL AS tinyint)\n- END AS `tier`, count(*) AS `count`\n+ END AS `tier`, count(1) AS `count`\n FROM alltypes\n GROUP BY 1\n ) t0\"\"\"\n", "test_exprs.py": "@@ -723,7 +723,7 @@ def test_filter_with_analytic():\n expected = \"\"\"\\\n SELECT `col`, `analytic`\n FROM (\n- SELECT `col`, count(*) OVER () AS `analytic`\n+ SELECT `col`, count(1) OVER () AS `analytic`\n FROM (\n SELECT `col`, `filter`\n FROM (\n", "test_sql.py": "@@ -84,7 +84,7 @@ def test_nested_join_base():\n \n expected = \"\"\"\\\n WITH t0 AS (\n- SELECT `uuid`, count(*) AS `count`\n+ SELECT `uuid`, count(1) AS `count`\n FROM t\n GROUP BY 1\n )\n@@ -120,7 +120,7 @@ def test_nested_joins_single_cte():\n \n expected = \"\"\"\\\n WITH t0 AS (\n- SELECT `uuid`, count(*) AS `count`\n+ SELECT `uuid`, count(1) AS `count`\n FROM t\n GROUP BY 1\n )\n@@ -321,7 +321,7 @@ WITH t0 AS (\n FROM t2\n ),\n t1 AS (\n- SELECT `d`, CAST(`d` / 15 AS bigint) AS `idx`, `c`, count(*) AS `row_count`\n+ SELECT `d`, CAST(`d` / 15 AS bigint) AS `idx`, `c`, count(1) AS `row_count`\n FROM t0\n GROUP BY 1, 2, 3\n ),\n@@ -331,7 +331,7 @@ t2 AS (\n )\n SELECT t3.*, t4.`total`\n FROM (\n- SELECT `d`, `b`, count(*) AS `count`, count(DISTINCT `c`) AS `unique`\n+ SELECT `d`, `b`, count(1) AS `count`, count(DISTINCT `c`) AS `unique`\n FROM t2\n GROUP BY 1, 2\n ) t3\n@@ -508,12 +508,12 @@ WITH t0 AS (\n SELECT t1.`year`, t1.`count` AS `pre_count`, t2.`count` AS `post_count`,\n t2.`count` / CAST(t1.`count` AS double) AS `fraction`\n FROM (\n- SELECT extract(`odate`, 'year') AS `year`, count(*) AS `count`\n+ SELECT extract(`odate`, 'year') AS `year`, count(1) AS `count`\n FROM t0\n GROUP BY 1\n ) t1\n INNER JOIN (\n- SELECT extract(t0.`odate`, 'year') AS `year`, count(*) AS `count`\n+ SELECT extract(t0.`odate`, 'year') AS `year`, count(1) AS `count`\n FROM t0\n WHERE t0.`o_totalprice` > (\n SELECT avg(t7.`o_totalprice`) AS `mean`\n@@ -551,7 +551,7 @@ WITH t0 AS (\n ON t6.`o_custkey` = t5.`c_custkey`\n ),\n t1 AS (\n- SELECT extract(`odate`, 'year') AS `year`, count(*) AS `count`\n+ SELECT extract(`odate`, 'year') AS `year`, count(1) AS `count`\n FROM 
t0\n GROUP BY 1\n )\n", "test_functions.py": "@@ -773,7 +773,5 @@ def test_count_on_order_by(con):\n result = str(\n expr.compile().compile(compile_kwargs={'literal_binds': True})\n )\n- expected = (\n- \"SELECT count('*') AS count \\nFROM main.batting AS t0\" # noqa: W291\n- )\n+ expected = \"SELECT count(*) AS count \\nFROM main.batting AS t0\"\n assert result == expected\n", "test_aggregation.py": "@@ -342,6 +342,11 @@ def test_aggregate_multikey_group_reduction(backend, alltypes, df):\n pytest.mark.notyet([\"impala\", \"pyspark\"]),\n ],\n ),\n+ param(\n+ lambda t, where: t.count(where=where),\n+ lambda t, where: len(t[where]),\n+ id='count_star',\n+ ),\n ],\n )\n @pytest.mark.parametrize(\n", "reductions.py": "@@ -21,7 +21,13 @@ class Filterable(Value):\n \n @public\n class Count(Filterable, Reduction):\n- arg = rlz.one_of((rlz.column(rlz.any), rlz.table))\n+ arg = rlz.column(rlz.any)\n+ output_dtype = dt.int64\n+\n+\n+@public\n+class CountStar(Filterable, Reduction):\n+ arg = rlz.table\n output_dtype = dt.int64\n \n \n", "logical.py": "@@ -42,8 +42,6 @@ class BooleanValue(NumericValue):\n SELECT CASE WHEN `is_person` THEN 'yes' ELSE 'no' END AS `tmp`\n FROM t\n \"\"\"\n- import ibis.expr.operations as ops\n-\n # Result will be the result of promotion of true/false exprs. These\n # might be conflicting types; same type resolution as case expressions\n # must be used.\n", "relations.py": "@@ -743,15 +743,37 @@ class Table(Expr):\n ]\n return an.apply_filter(table.op(), predicates).to_expr()\n \n- def count(self) -> ir.IntegerScalar:\n+ def count(self, where: ir.BooleanValue | None = None) -> ir.IntegerScalar:\n \"\"\"Compute the number of rows in the table.\n \n+ Parameters\n+ ----------\n+ where\n+ Optional boolean expression to filter rows when counting.\n+\n Returns\n -------\n IntegerScalar\n Number of rows in the table\n+\n+ Examples\n+ --------\n+ >>> import ibis\n+ >>> from ibis import _\n+ >>> t = ibis.table(dict(a=\"int\"), name=\"t\")\n+ >>> t.count()\n+ r0 := UnboundTable: t\n+ a int64\n+ count: CountStar(t)\n+ >>> t.aggregate(n=_.count(_.a > 1), total=_.sum())\n+ r0 := UnboundTable: t\n+ a int64\n+ Aggregation[r0]\n+ metrics:\n+ n: CountStar(t, where=r0.a > 1)\n+ total: Sum(r0.a)\n \"\"\"\n- return ops.Count(self).to_expr().name(\"count\")\n+ return ops.CountStar(self, where).to_expr().name(\"count\")\n \n def dropna(\n self,\n", "test_table.py": "@@ -418,7 +418,7 @@ def test_table_count(table):\n result = table.count()\n assert isinstance(result, ir.IntegerScalar)\n assert isinstance(result.op(), ops.Alias)\n- assert isinstance(result.op().arg, ops.Count)\n+ assert isinstance(result.op().arg, ops.CountStar)\n assert result.get_name() == 'count'\n \n \n", "test_compiler.py": "@@ -152,7 +152,7 @@ def test_having_size():\n assert (\n result\n == \"\"\"\\\n-SELECT `string_col`, count(*) AS `count`\n+SELECT `string_col`, count(1) AS `count`\n FROM functional_alltypes\n GROUP BY 1\n HAVING max(`double_col`) = 1\"\"\"\n", "test_non_tabular_results.py": "@@ -110,7 +110,7 @@ def test_complex_array_expr_projection(alltypes):\n expected = \"\"\"\\\n SELECT CAST(`g` AS double) AS `Cast(g, float64)`\n FROM (\n- SELECT `g`, count(*) AS `count`\n+ SELECT `g`, count(1) AS `count`\n FROM alltypes\n GROUP BY 1\n ) t0\"\"\"\n", "test_select_sql.py": "@@ -249,7 +249,7 @@ def test_where_analyze_scalar_op(functional_alltypes):\n \n result = Compiler.to_sql(expr)\n expected = \"\"\"\\\n-SELECT count(*) AS `count`\n+SELECT count(1) AS `count`\n FROM functional_alltypes\n WHERE (`timestamp_col` < 
date_add(cast({!r} as timestamp), INTERVAL 3 MONTH)) AND\n (`timestamp_col` < date_add(cast(now() as timestamp), INTERVAL 10 DAY))\"\"\" # noqa: E501\n@@ -331,7 +331,7 @@ HAVING sum(`f`) > 10\"\"\"\n expected = \"\"\"SELECT `foo_id`, sum(`f`) AS `total`\n FROM star1\n GROUP BY 1\n-HAVING count(*) > 100\"\"\"\n+HAVING count(1) > 100\"\"\"\n assert result == expected\n \n \n@@ -339,7 +339,7 @@ def test_aggregate_table_count_metric(star1):\n expr = star1.count()\n \n result = Compiler.to_sql(expr)\n- expected = \"\"\"SELECT count(*) AS `count`\n+ expected = \"\"\"SELECT count(1) AS `count`\n FROM star1\"\"\"\n assert result == expected\n \n@@ -349,7 +349,7 @@ def test_aggregate_count_joined(aggregate_count_joined):\n \n result = Compiler.to_sql(expr)\n expected = \"\"\"\\\n-SELECT count(*) AS `count`\n+SELECT count(1) AS `count`\n FROM (\n SELECT t2.*, t1.`r_name` AS `region`\n FROM tpch_region t1\n@@ -721,7 +721,7 @@ def test_limit_with_self_join(functional_alltypes):\n # it works\n result = Compiler.to_sql(expr)\n expected = \"\"\"\\\n-SELECT count(*) AS `count`\n+SELECT count(1) AS `count`\n FROM (\n SELECT t1.`id` AS `id_x`, t1.`bool_col` AS `bool_col_x`,\n t1.`tinyint_col` AS `tinyint_col_x`,\n@@ -911,7 +911,7 @@ def test_topk_analysis_bug():\n \n result = Compiler.to_sql(expr)\n expected = f\"\"\"\\\n-SELECT `origin`, count(*) AS `count`\n+SELECT `origin`, count(1) AS `count`\n FROM (\n SELECT t1.*\n FROM (\n@@ -1320,7 +1320,7 @@ def test_sort_by_on_limit_yield_subquery(functional_alltypes):\n result = Compiler.to_sql(expr)\n expected = \"\"\"SELECT *\n FROM (\n- SELECT `string_col`, count(*) AS `nrows`\n+ SELECT `string_col`, count(1) AS `nrows`\n FROM functional_alltypes\n GROUP BY 1\n LIMIT 5\n", "test_sqlalchemy.py": "@@ -409,7 +409,7 @@ def test_where_simple_comparisons(sa_star1, where_simple_comparisons):\n lambda st: (\n sa.select([st.c.foo_id, F.sum(st.c.f).label(\"total\")])\n .group_by(st.c.foo_id)\n- .having(F.count(\"*\") > L(100))\n+ .having(F.count() > L(100))\n ),\n ),\n ],\n"}
chore(pandas): get rid of type checking warnings in `ibis/formats/pandas.py`
ec0514b45244d779276d49e5ac64ae9d0fe5b8ef
chore
https://github.com/rohankumardubey/ibis/commit/ec0514b45244d779276d49e5ac64ae9d0fe5b8ef
get rid of type checking warnings in `ibis/formats/pandas.py`
{"pandas.py": "@@ -24,11 +24,11 @@ if not _has_arrow_dtype:\n class PandasType(NumpyType):\n @classmethod\n def to_ibis(cls, typ, nullable=True):\n- if pdt.is_datetime64tz_dtype(typ):\n+ if isinstance(typ, pdt.DatetimeTZDtype):\n return dt.Timestamp(timezone=str(typ.tz), nullable=nullable)\n elif pdt.is_datetime64_dtype(typ):\n return dt.Timestamp(nullable=nullable)\n- elif pdt.is_categorical_dtype(typ):\n+ elif isinstance(typ, pdt.CategoricalDtype):\n return dt.String(nullable=nullable)\n elif pdt.is_extension_array_dtype(typ):\n if _has_arrow_dtype and isinstance(typ, pd.ArrowDtype):\n@@ -162,15 +162,11 @@ class PandasData(DataMapper):\n \n @staticmethod\n def convert_Timestamp(s, dtype, pandas_type):\n- import pandas.api.types as pdt\n-\n- if pdt.is_datetime64tz_dtype(s.dtype):\n+ if isinstance(dtype, pd.DatetimeTZDtype):\n return s.dt.tz_convert(dtype.timezone)\n elif pdt.is_datetime64_dtype(s.dtype):\n return s.dt.tz_localize(dtype.timezone)\n else:\n- import pandas as pd\n-\n try:\n return s.astype(pandas_type)\n except pd.errors.OutOfBoundsDatetime: # uncovered\n@@ -188,9 +184,7 @@ class PandasData(DataMapper):\n \n @staticmethod\n def convert_Date(s, dtype, pandas_type):\n- import pandas.api.types as pdt\n-\n- if pdt.is_datetime64tz_dtype(s.dtype):\n+ if isinstance(s.dtype, pd.DatetimeTZDtype):\n s = s.dt.tz_convert(\"UTC\").dt.tz_localize(None)\n return s.astype(pandas_type, errors=\"ignore\").dt.normalize()\n \n"}
feat(snowflake): allow empty url when using ibis.connect (#8428) Follow-up to #8422 to support that behavior in `ibis.connect()`.
0275c9bcfb2905c916e0d8abbb5b78315f14a5f6
feat
https://github.com/ibis-project/ibis/commit/0275c9bcfb2905c916e0d8abbb5b78315f14a5f6
allow empty url when using ibis.connect (#8428) Follow-up to #8422 to support that behavior in `ibis.connect()`.
{"__init__.py": "@@ -106,17 +106,21 @@ class Backend(SQLBackend, CanCreateDatabase, CanCreateSchema):\n \"\"\"\n \n url = urlparse(url)\n- database, schema = url.path[1:].split(\"/\", 1)\n- query_params = parse_qs(url.query)\n- (warehouse,) = query_params.pop(\"warehouse\", (None,))\n- connect_args = {\n- \"user\": url.username,\n- \"password\": url.password or \"\",\n- \"account\": url.hostname,\n- \"warehouse\": warehouse,\n- \"database\": database or \"\",\n- \"schema\": schema or \"\",\n- }\n+ if url.path:\n+ database, schema = url.path[1:].split(\"/\", 1)\n+ query_params = parse_qs(url.query)\n+ (warehouse,) = query_params.pop(\"warehouse\", (None,))\n+ connect_args = {\n+ \"user\": url.username,\n+ \"password\": url.password or \"\",\n+ \"account\": url.hostname,\n+ \"warehouse\": warehouse,\n+ \"database\": database or \"\",\n+ \"schema\": schema or \"\",\n+ }\n+ else:\n+ connect_args = {}\n+ query_params = {}\n \n for name, value in query_params.items():\n if len(value) > 1:\n", "test_client.py": "@@ -301,3 +301,6 @@ def test_compile_does_not_make_requests(con, mocker):\n def test_no_argument_connection():\n con = ibis.snowflake.connect()\n assert con.list_tables() is not None\n+\n+ con = ibis.connect(\"snowflake://\")\n+ assert con.list_tables() is not None\n"}
feat: add `[x; _]` literal
f1d71e0dab4f87cd5d6fc452fefbef20d41fb777
feat
https://github.com/erg-lang/erg/commit/f1d71e0dab4f87cd5d6fc452fefbef20d41fb777
add `[x; _]` literal
{"codegen.rs": "@@ -34,6 +34,7 @@ use erg_parser::token::{Token, TokenKind};\n \n use crate::compile::{AccessKind, Name, StoreLoadKind};\n use crate::error::CompileError;\n+use crate::hir::ArrayWithLength;\n use crate::hir::{\n Accessor, Args, Array, BinOp, Block, Call, ClassDef, Def, DefBody, Expr, GuardClause,\n Identifier, Lambda, Literal, NonDefaultParamSignature, Params, PatchDef, PosArg, ReDef, Record,\n@@ -2735,7 +2736,11 @@ impl PyCodeGenerator {\n let init_stack_len = self.stack_len();\n if !self.cfg.no_std {\n self.emit_push_null();\n- self.emit_load_name_instr(Identifier::public(\"Array\"));\n+ if array.is_unsized() {\n+ self.emit_load_name_instr(Identifier::public(\"UnsizedArray\"));\n+ } else {\n+ self.emit_load_name_instr(Identifier::public(\"Array\"));\n+ }\n }\n match array {\n Array::Normal(mut arr) => {\n@@ -2751,16 +2756,25 @@ impl PyCodeGenerator {\n self.stack_dec_n(len - 1);\n }\n }\n- Array::WithLength(arr) => {\n- self.emit_expr(*arr.elem);\n+ Array::WithLength(ArrayWithLength {\n+ elem,\n+ len: Some(len),\n+ ..\n+ }) => {\n+ self.emit_expr(*elem);\n self.write_instr(BUILD_LIST);\n self.write_arg(1);\n self.emit_call_instr(1, Name);\n self.stack_dec();\n- self.emit_expr(*arr.len);\n+ self.emit_expr(*len);\n self.emit_binop_instr(Token::dummy(TokenKind::Star, \"*\"), TypePair::ArrayNat);\n return;\n }\n+ Array::WithLength(ArrayWithLength {\n+ elem, len: None, ..\n+ }) => {\n+ self.emit_expr(*elem);\n+ }\n other => todo!(\"{other}\"),\n }\n if !self.cfg.no_std {\n", "compare.rs": "@@ -533,7 +533,9 @@ impl Context {\n (Mono(n), NamedTuple(_)) => &n[..] == \"GenericNamedTuple\" || &n[..] == \"GenericTuple\",\n (Mono(n), Record(_)) => &n[..] == \"Record\",\n (Type, Subr(subr)) => self.supertype_of(&Type, &subr.return_t),\n- (Type, Poly { name, params }) if &name[..] == \"Array\" || &name[..] == \"Set\" => {\n+ (Type, Poly { name, params })\n+ if &name[..] == \"Array\" || &name[..] == \"UnsizedArray\" || &name[..] 
== \"Set\" =>\n+ {\n let elem_t = self.convert_tp_into_type(params[0].clone()).unwrap();\n self.supertype_of(&Type, &elem_t)\n }\n", "eval.rs": "@@ -21,7 +21,7 @@ use erg_parser::token::{Token, TokenKind};\n \n use crate::ty::constructors::{\n array_t, bounded, dict_t, mono, mono_q, named_free_var, poly, proj, proj_call, ref_, ref_mut,\n- refinement, set_t, subr_t, subtypeof, tp_enum, tuple_t, v_enum,\n+ refinement, set_t, subr_t, subtypeof, tp_enum, tuple_t, unknown_len_array_t, v_enum,\n };\n use crate::ty::free::{Constraint, HasLevel};\n use crate::ty::typaram::{OpKind, TyParam};\n@@ -731,6 +731,33 @@ impl Context {\n fn eval_const_array(&self, arr: &Array) -> EvalResult<ValueObj> {\n match arr {\n Array::Normal(arr) => self.eval_const_normal_array(arr),\n+ Array::WithLength(arr) => {\n+ let elem = self.eval_const_expr(&arr.elem.expr)?;\n+ match arr.len.as_ref() {\n+ Expr::Accessor(Accessor::Ident(ident)) if ident.is_discarded() => {\n+ Ok(ValueObj::UnsizedArray(Box::new(elem)))\n+ }\n+ other => {\n+ let len = self.eval_const_expr(other)?;\n+ let len = usize::try_from(&len).map_err(|_| {\n+ EvalError::type_mismatch_error(\n+ self.cfg.input.clone(),\n+ line!() as usize,\n+ other.loc(),\n+ self.caused_by(),\n+ \"_\",\n+ None,\n+ &Type::Nat,\n+ &len.t(),\n+ None,\n+ None,\n+ )\n+ })?;\n+ let arr = vec![elem; len];\n+ Ok(ValueObj::Array(ArcArray::from(arr)))\n+ }\n+ }\n+ }\n _ => Err(EvalErrors::from(EvalError::not_const_expr(\n self.cfg.input.clone(),\n line!() as usize,\n@@ -1903,6 +1930,10 @@ impl Context {\n }\n Ok(array_t(union, len))\n }\n+ ValueObj::UnsizedArray(elem) => {\n+ let elem = self.convert_value_into_type(*elem)?;\n+ Ok(unknown_len_array_t(elem))\n+ }\n ValueObj::Set(set) => Ok(v_enum(set)),\n ValueObj::Dict(dic) => {\n let dic = dic\n", "generalize.rs": "@@ -1125,7 +1125,9 @@ impl Context {\n }\n hir::Array::WithLength(arr) => {\n self.resolve_expr_t(&mut arr.elem, qnames)?;\n- self.resolve_expr_t(&mut arr.len, qnames)?;\n+ if let Some(len) = &mut arr.len {\n+ self.resolve_expr_t(len, qnames)?;\n+ }\n let t = mem::take(&mut arr.t);\n let mut dereferencer = Dereferencer::simple(self, qnames, arr);\n arr.t = dereferencer.deref_tyvar(t)?;\n", "declare.rs": "@@ -277,7 +277,7 @@ impl ASTLowerer {\n arr.r_sqbr,\n Type::Failure,\n elem,\n- len,\n+ Some(len),\n )))\n }\n ast::Array::Normal(arr) => {\n", "effectcheck.rs": "@@ -130,7 +130,9 @@ impl SideEffectChecker {\n }\n Array::WithLength(arr) => {\n self.check_expr(&arr.elem);\n- self.check_expr(&arr.len);\n+ if let Some(len) = &arr.len {\n+ self.check_expr(len);\n+ }\n }\n Array::Comprehension(arr) => {\n self.check_expr(&arr.elem);\n@@ -340,7 +342,9 @@ impl SideEffectChecker {\n }\n Array::WithLength(arr) => {\n self.check_expr(&arr.elem);\n- self.check_expr(&arr.len);\n+ if let Some(len) = &arr.len {\n+ self.check_expr(len);\n+ }\n }\n Array::Comprehension(arr) => {\n self.check_expr(&arr.elem);\n@@ -491,7 +495,10 @@ impl SideEffectChecker {\n .pos_args\n .iter()\n .any(|elem| Self::is_impure(&elem.expr)),\n- Array::WithLength(arr) => Self::is_impure(&arr.elem) || Self::is_impure(&arr.len),\n+ Array::WithLength(arr) => {\n+ Self::is_impure(&arr.elem)\n+ || arr.len.as_ref().map_or(false, |len| Self::is_impure(len))\n+ }\n _ => todo!(),\n },\n Expr::Tuple(tup) => match tup {\n", "hir.rs": "@@ -9,7 +9,7 @@ use erg_common::log;\n use erg_common::set::Set as HashSet;\n use erg_common::traits::{Locational, NestedDisplay, NoTypeDisplay, Stream};\n use erg_common::{\n- enum_unwrap, fmt_option, fmt_vec, fmt_vec_split_with, 
impl_display_for_enum,\n+ enum_unwrap, fmt_option, fmt_option_map, fmt_vec, fmt_vec_split_with, impl_display_for_enum,\n impl_display_from_nested, impl_locational, impl_locational_for_enum,\n impl_nested_display_for_chunk_enum, impl_nested_display_for_enum,\n impl_no_type_display_for_enum, impl_stream,\n@@ -702,12 +702,18 @@ pub struct ArrayWithLength {\n pub r_sqbr: Token,\n pub t: Type,\n pub elem: Box<Expr>,\n- pub len: Box<Expr>,\n+ pub len: Option<Box<Expr>>,\n }\n \n impl NestedDisplay for ArrayWithLength {\n fn fmt_nest(&self, f: &mut fmt::Formatter<'_>, _level: usize) -> fmt::Result {\n- write!(f, \"[{}; {}](: {})\", self.elem, self.len, self.t)\n+ write!(\n+ f,\n+ \"[{}; {}](: {})\",\n+ self.elem,\n+ fmt_option!(self.len, else \"_\"),\n+ self.t\n+ )\n }\n }\n \n@@ -716,7 +722,7 @@ impl NoTypeDisplay for ArrayWithLength {\n format!(\n \"[{}; {}]\",\n self.elem.to_string_notype(),\n- self.len.to_string_notype()\n+ fmt_option_map!(self.len, else \"_\", |len: &Expr| len.to_string_notype())\n )\n }\n }\n@@ -726,15 +732,19 @@ impl_locational!(ArrayWithLength, l_sqbr, elem, r_sqbr);\n impl_t!(ArrayWithLength);\n \n impl ArrayWithLength {\n- pub fn new(l_sqbr: Token, r_sqbr: Token, t: Type, elem: Expr, len: Expr) -> Self {\n+ pub fn new(l_sqbr: Token, r_sqbr: Token, t: Type, elem: Expr, len: Option<Expr>) -> Self {\n Self {\n l_sqbr,\n r_sqbr,\n t,\n elem: Box::new(elem),\n- len: Box::new(len),\n+ len: len.map(Box::new),\n }\n }\n+\n+ pub const fn is_unsized(&self) -> bool {\n+ self.len.is_none()\n+ }\n }\n \n // TODO: generators\n@@ -829,6 +839,12 @@ impl_display_for_enum!(Array; Normal, Comprehension, WithLength);\n impl_locational_for_enum!(Array; Normal, Comprehension, WithLength);\n impl_t_for_enum!(Array; Normal, Comprehension, WithLength);\n \n+impl Array {\n+ pub const fn is_unsized(&self) -> bool {\n+ matches!(self, Self::WithLength(arr) if arr.is_unsized())\n+ }\n+}\n+\n #[derive(Debug, Clone, PartialEq, Eq, Hash)]\n pub struct NormalTuple {\n pub elems: Args,\n", "_erg_array.py": "@@ -3,6 +3,8 @@ from _erg_range import Range\n from _erg_nat import NatMut\n from _erg_int import IntMut\n from _erg_contains_operator import contains_operator\n+from _erg_result import is_ok\n+from _erg_result import Error\n \n class Array(list):\n def try_new(arr): # -> Result[Array]\n@@ -11,6 +13,21 @@ class Array(list):\n else:\n return Error(\"not a list\")\n \n+ def generic_try_new(arr, cls = None): # -> Result[Array]\n+ if cls is None:\n+ return Array.try_new(arr)\n+ else:\n+ elem_t = cls.__args__[0]\n+ elems = []\n+ for elem in arr:\n+ # TODO: nested check\n+ elem = elem_t.try_new(elem)\n+ if is_ok(elem):\n+ elems.append(elem)\n+ else:\n+ return Error(\"not a \" + str(elem_t))\n+ return Array(elems)\n+\n def dedup(self, same_bucket=None):\n if same_bucket is None:\n return Array(list(set(self)))\n@@ -77,3 +94,8 @@ class Array(list):\n \n def update_nth(self, index, f):\n self[index] = f(self[index])\n+\n+class UnsizedArray:\n+ elem: object\n+ def __init__(self, elem):\n+ self.elem = elem\n", "_erg_contains_operator.py": "@@ -12,6 +12,8 @@ def contains_operator(y, elem) -> bool:\n elif is_type(y):\n if _isinstance(elem, y):\n return True\n+ elif hasattr(y, \"generic_try_new\"):\n+ return is_ok(y.generic_try_new(elem, y))\n elif hasattr(y, \"try_new\") and is_ok(y.try_new(elem)):\n return True\n elif hasattr(y, \"__origin__\") and hasattr(y.__origin__, \"type_check\"):\n", "_erg_std_prelude.py": "@@ -13,7 +13,7 @@ from _erg_nat import Nat, NatMut\n from _erg_bool import Bool\n from _erg_bytes 
import Bytes\n from _erg_str import Str, StrMut\n-from _erg_array import Array\n+from _erg_array import Array, UnsizedArray\n from _erg_dict import Dict\n from _erg_set import Set\n from _erg_contains_operator import contains_operator\n", "link_hir.rs": "@@ -130,7 +130,9 @@ impl<'a> HIRLinker<'a> {\n }\n Array::WithLength(arr) => {\n Self::resolve_pymod_path(&mut arr.elem);\n- Self::resolve_pymod_path(&mut arr.len);\n+ if let Some(len) = arr.len.as_deref_mut() {\n+ Self::resolve_pymod_path(len);\n+ }\n }\n _ => todo!(),\n },\n@@ -252,7 +254,9 @@ impl<'a> HIRLinker<'a> {\n }\n Array::WithLength(arr) => {\n self.replace_import(&mut arr.elem);\n- self.replace_import(&mut arr.len);\n+ if let Some(len) = arr.len.as_deref_mut() {\n+ self.replace_import(len);\n+ }\n }\n _ => todo!(),\n },\n", "lower.rs": "@@ -27,7 +27,8 @@ use crate::artifact::{CompleteArtifact, IncompleteArtifact};\n use crate::context::instantiate::TyVarCache;\n use crate::module::SharedCompilerResource;\n use crate::ty::constructors::{\n- array_t, free_var, func, guard, mono, poly, proc, refinement, set_t, singleton, ty_tp, v_enum,\n+ array_t, free_var, func, guard, mono, poly, proc, refinement, set_t, singleton, ty_tp,\n+ unsized_array_t, v_enum,\n };\n use crate::ty::free::Constraint;\n use crate::ty::typaram::TyParam;\n@@ -370,18 +371,26 @@ impl ASTLowerer {\n });\n let elem = self.lower_expr(array.elem.expr, expect_elem.as_ref())?;\n let array_t = self.gen_array_with_length_type(&elem, &array.len);\n- let len = self.lower_expr(*array.len, Some(&Type::Nat))?;\n+ let len = match *array.len {\n+ ast::Expr::Accessor(ast::Accessor::Ident(ident)) if ident.is_discarded() => None,\n+ len => Some(self.lower_expr(len, Some(&Type::Nat))?),\n+ };\n let hir_array = hir::ArrayWithLength::new(array.l_sqbr, array.r_sqbr, array_t, elem, len);\n Ok(hir_array)\n }\n \n fn gen_array_with_length_type(&self, elem: &hir::Expr, len: &ast::Expr) -> Type {\n+ match len {\n+ ast::Expr::Accessor(ast::Accessor::Ident(ident)) if ident.is_discarded() => {\n+ return unsized_array_t(elem.t());\n+ }\n+ _ => {}\n+ }\n let maybe_len = self.module.context.eval_const_expr(len);\n match maybe_len {\n Ok(v @ ValueObj::Nat(_)) => array_t(elem.t(), TyParam::Value(v)),\n Ok(other) => todo!(\"{other} is not a Nat object\"),\n- // REVIEW: is it ok to ignore the error?\n- Err(_e) => array_t(elem.t(), TyParam::erased(Type::Nat)),\n+ Err(err) => todo!(\"{err}\"),\n }\n }\n \n", "ownercheck.rs": "@@ -208,7 +208,9 @@ impl OwnershipChecker {\n }\n Array::WithLength(arr) => {\n self.check_expr(&arr.elem, ownership, false);\n- self.check_expr(&arr.len, ownership, false);\n+ if let Some(len) = &arr.len {\n+ self.check_expr(len, ownership, false);\n+ }\n }\n _ => todo!(),\n },\n", "transpile.rs": "@@ -1130,8 +1130,9 @@ impl JsonGenerator {\n Some(ValueObj::Array(vals.into()))\n }\n Expr::Array(Array::WithLength(arr)) => {\n- let len = self\n- .expr_into_value(*arr.len)\n+ let len = arr\n+ .len\n+ .and_then(|len| self.expr_into_value(*len))\n .and_then(|v| usize::try_from(&v).ok())?;\n let vals = vec![self.expr_into_value(*arr.elem)?; len];\n Some(ValueObj::Array(vals.into()))\n", "constructors.rs": "@@ -55,6 +55,12 @@ pub fn unknown_len_array_mut(elem_t: Type) -> Type {\n array_mut(elem_t, TyParam::erased(Type::Nat))\n }\n \n+/// `UnsizedArray` is a type of `[x; _]` (unsized array literal).\n+/// `UnsizedArray(T) != Array(T, _)`\n+pub fn unsized_array_t(elem_t: Type) -> Type {\n+ poly(\"UnsizedArray\", vec![TyParam::t(elem_t)])\n+}\n+\n pub fn tuple_t(args: Vec<Type>) -> Type 
{\n poly(\n \"Tuple\",\n", "value.rs": "@@ -25,7 +25,7 @@ use crate::context::Context;\n use self::value_set::inner_class;\n \n use super::codeobj::CodeObj;\n-use super::constructors::{array_t, dict_t, refinement, set_t, tuple_t};\n+use super::constructors::{array_t, dict_t, refinement, set_t, tuple_t, unsized_array_t};\n use super::typaram::{OpKind, TyParam};\n use super::{ConstSubr, Field, HasType, Predicate, Type};\n use super::{CONTAINER_OMIT_THRESHOLD, STR_OMIT_THRESHOLD};\n@@ -503,6 +503,7 @@ pub enum ValueObj {\n Str(Str),\n Bool(bool),\n Array(ArcArray<ValueObj>),\n+ UnsizedArray(Box<ValueObj>),\n Set(Set<ValueObj>),\n Dict(Dict<ValueObj, ValueObj>),\n Tuple(ArcArray<ValueObj>),\n@@ -561,6 +562,7 @@ impl fmt::Debug for ValueObj {\n }\n }\n Self::Array(arr) => write!(f, \"[{}]\", fmt_iter(arr.iter())),\n+ Self::UnsizedArray(elem) => write!(f, \"[{elem}; _]\"),\n Self::Dict(dict) => {\n write!(f, \"{{\")?;\n for (i, (k, v)) in dict.iter().enumerate() {\n@@ -749,6 +751,10 @@ impl Hash for ValueObj {\n Self::Str(s) => s.hash(state),\n Self::Bool(b) => b.hash(state),\n Self::Array(arr) => arr.hash(state),\n+ Self::UnsizedArray(elem) => {\n+ \"UnsizedArray\".hash(state);\n+ elem.hash(state)\n+ }\n Self::Dict(dict) => dict.hash(state),\n Self::Tuple(tup) => tup.hash(state),\n Self::Set(st) => st.hash(state),\n@@ -1157,6 +1163,7 @@ impl ValueObj {\n .unwrap_or(Type::Never),\n TyParam::value(arr.len()),\n ),\n+ Self::UnsizedArray(elem) => unsized_array_t(elem.class()),\n Self::Dict(dict) => {\n let tp = dict\n .iter()\n", "assert_cast.er": "@@ -12,6 +12,10 @@ assert j[\"a\"] in Array(Int)\n assert j[\"a\"] notin Array(Str)\n _: Array(Int) = j[\"a\"]\n \n+k = json.loads \"{ \\\"a\\\": [1] }\"\n+assert k in {Str: Obj}\n+assert k[\"a\"] notin Array(Str)\n+\n dic = {\"a\": \"b\", \"c\": \"d\"}\n assert dic in {Str: {\"b\", \"d\"}}\n assert dic in {Str: Str}\n", "class.er": "@@ -17,3 +17,11 @@ assert d.foo(1) == 2\n \n c = C.new { .x = D.new(1) }\n assert c.x.y == 1\n+\n+Vec = Class [Int; _]\n+Vec.\n+ sum self =\n+ sum(self::base)\n+\n+v = Vec.new [1, 2, 3]\n+assert v.sum() == 6\n"}
refactor(backends): remove `ast_schema` method This method is only used in two places and we have a more obvious API to do this now: `expr.as_table().schema()`. BREAKING CHANGE: `Backend.ast_schema` is removed. Use `expr.as_table().schema()` instead.
51b5ef8a2331ebaa03f5b573856c4f88bd297a34
refactor
https://github.com/ibis-project/ibis/commit/51b5ef8a2331ebaa03f5b573856c4f88bd297a34
remove `ast_schema` method This method is only used in two places and we have a more obvious API to do this now: `expr.as_table().schema()`. BREAKING CHANGE: `Backend.ast_schema` is removed. Use `expr.as_table().schema()` instead.
{"__init__.py": "@@ -328,7 +328,7 @@ class Backend(BaseSQLBackend):\n sql = query_ast.compile()\n self._log(sql)\n cursor = self.raw_sql(sql, params=params, **kwargs)\n- schema = self.ast_schema(query_ast, **kwargs)\n+ schema = expr.as_table().schema()\n result = self.fetch_from_cursor(cursor, schema)\n \n if hasattr(getattr(query_ast, \"dml\", query_ast), \"result_handler\"):\n", "mkdocs.yml": "@@ -77,7 +77,6 @@ plugins:\n filters:\n - \"!^_\"\n - \"!^__\"\n- - \"!^ast_schema\"\n - \"!^backend_table_type\"\n - \"!^column$\"\n - \"!^compiler$\"\n"}
feat(sql): add native support for generated columns (#4884) To use generated columns, you can either use the `generated` option, or specify it as part of the `columnType`: ```ts @Entity() export class User { @PrimaryKey() id!: number; @Property({ length: 50 }) firstName!: string; @Property({ length: 50 }) lastName!: string; @Property<User>({ length: 100, generated: cols => `(concat(${cols.firstName}, ' ', ${cols.lastName})) stored` }) fullName!: string & Opt; @Property({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` }) fullName2!: string & Opt; } ``` To use a generated identity column in PostgreSQL, set the `generated` option to `identity`: > To allow providing the value explicitly, use `generated: 'by default as identity'`. ```ts @Entity() export class User { @PrimaryKey({ generated: 'identity' }) id!: number; } ```
a928291335f6867e02ed948afb5c9abd17975dba
feat
https://github.com/mikro-orm/mikro-orm/commit/a928291335f6867e02ed948afb5c9abd17975dba
add native support for generated columns (#4884) To use generated columns, you can either use the `generated` option, or specify it as part of the `columnType`: ```ts @Entity() export class User { @PrimaryKey() id!: number; @Property({ length: 50 }) firstName!: string; @Property({ length: 50 }) lastName!: string; @Property<User>({ length: 100, generated: cols => `(concat(${cols.firstName}, ' ', ${cols.lastName})) stored` }) fullName!: string & Opt; @Property({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` }) fullName2!: string & Opt; } ``` To use a generated identity column in PostgreSQL, set the `generated` option to `identity`: > To allow providing the value explicitly, use `generated: 'by default as identity'`. ```ts @Entity() export class User { @PrimaryKey({ generated: 'identity' }) id!: number; } ```
{"defining-entities.md": "@@ -1323,7 +1323,7 @@ export abstract class CustomBaseEntity {\n \n ## SQL Generated columns\n \n-Knex currently does not support generated columns, so the schema generator cannot properly diff them. To work around this, we can set `ignoreSchemaChanges` on a property to avoid a perpetual diff from the schema generator\n+To use generated columns, you can either use the `generated` option, or specify it as part of the `columnType`:\n \n <Tabs\n groupId=\"entity-def\"\n@@ -1336,18 +1336,24 @@ values={[\n }>\n <TabItem value=\"reflect-metadata\">\n \n-```ts title=\"./entities/Book.ts\"\n-@Entity\n-export class Book {\n+```ts title=\"./entities/User.ts\"\n+@Entity()\n+export class User {\n \n- @Property()\n- title!: string;\n+ @PrimaryKey()\n+ id!: number;\n \n- @Property({\n- columnType: 'VARCHAR GENERATED ALWAYS AS (LOWER(`title`)) VIRTUAL',\n- ignoreSchemaChanges: ['type', 'extra'],\n- })\n- titleLower!: string;\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(concat(${cols.firstName}, ' ', ${cols.lastName})) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` })\n+ fullName2!: string & Opt;\n \n }\n ```\n@@ -1356,17 +1362,23 @@ export class Book {\n <TabItem value=\"ts-morph\">\n \n ```ts title=\"./entities/Book.ts\"\n-@Entity\n-export class Book {\n+@Entity()\n+export class User {\n \n- @Property()\n- title!: string;\n+ @PrimaryKey()\n+ id!: number;\n \n- @Property({\n- columnType: 'VARCHAR GENERATED ALWAYS AS (LOWER(`title`)) VIRTUAL',\n- ignoreSchemaChanges: ['type', 'extra'],\n- })\n- titleLower!: string;\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(concat(${cols.firstName}, ' ', ${cols.lastName})) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` })\n+ fullName2!: string & Opt;\n \n }\n ```\n@@ -1374,17 +1386,87 @@ export class Book {\n </TabItem>\n <TabItem value=\"entity-schema\">\n \n+```ts title=\"./entities/User.ts\"\n+export interface IUser {\n+ id: number;\n+ firstName: string;\n+ lastName: string;\n+ fullName: string & Opt;\n+ fullName2: string & Opt;\n+}\n+\n+export const User = new EntitySchema<IUser>({\n+ name: 'User',\n+ properties: {\n+ id: { type: 'number', primary: true },\n+ firstName: { type: 'string', length: 50 },\n+ lastName: { type: 'string', length: 50 },\n+ fullName: { \n+ type: 'string',\n+ length: 100, \n+ generated: cols => `(concat(${cols.firstName}, ' ', ${cols.lastName})) stored`,\n+ },\n+ fullName2: { \n+ type: 'string', \n+ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual`,\n+ },\n+ },\n+});\n+```\n+\n+ </TabItem>\n+</Tabs>\n+\n+To use a generated identity column in PostgreSQL, set the `generated` option to `identity`:\n+\n+> To allow providing the value explicitly, use `generated: 'by default as identity'`.\n+\n+<Tabs\n+groupId=\"entity-def\"\n+defaultValue=\"reflect-metadata\"\n+values={[\n+{label: 'reflect-metadata', value: 'reflect-metadata'},\n+{label: 'ts-morph', value: 'ts-morph'},\n+{label: 'EntitySchema', value: 'entity-schema'},\n+]\n+}>\n+<TabItem value=\"reflect-metadata\">\n+\n+```ts title=\"./entities/User.ts\"\n+@Entity()\n+export class User {\n+\n+ 
@PrimaryKey({ generated: 'identity' })\n+ id!: number;\n+\n+}\n+```\n+\n+ </TabItem>\n+ <TabItem value=\"ts-morph\">\n+\n ```ts title=\"./entities/Book.ts\"\n-export interface IBook {\n- title: string;\n- titleLower: string;\n+@Entity()\n+export class User {\n+\n+ @PrimaryKey({ generated: 'identity' })\n+ id!: number;\n+\n+}\n+```\n+\n+ </TabItem>\n+ <TabItem value=\"entity-schema\">\n+\n+```ts title=\"./entities/User.ts\"\n+export interface IUser {\n+ id: number;\n }\n \n-export const Book = new EntitySchema<IBook>({\n- name: 'Book',\n+export const User = new EntitySchema<IUser>({\n+ name: 'User',\n properties: {\n- title: { type: String },\n- titleLower: { type: String, columnType: 'VARCHAR GENERATED ALWAYS AS (LOWER(`title`)) VIRTUAL', ignoreSchemaChanges: ['type', 'extra'] },\n+ id: { type: 'number', primary: true, generated: 'identity' },\n },\n });\n ```\n", "BetterSqliteConnection.ts": "@@ -178,7 +178,7 @@ export class BetterSqliteConnection extends AbstractSqlConnection {\n if (obj.method === 'raw') {\n const query = obj.sql.trim().toLowerCase();\n \n- if (query.startsWith('insert into') && query.includes(' returning ')) {\n+ if ((query.startsWith('insert into') || query.startsWith('update ')) && query.includes(' returning ')) {\n return 'all';\n }\n \n", "BetterSqliteSchemaHelper.ts": "@@ -20,16 +20,53 @@ export class BetterSqliteSchemaHelper extends SchemaHelper {\n + `union all select name as table_name from sqlite_temp_master where type = 'table' order by name`;\n }\n \n+ private parseTableDefinition(sql: string, cols: any[]) {\n+ const columns: Dictionary<{ name: string; definition: string }> = {};\n+\n+ // extract all columns definitions\n+ let columnsDef = sql.replaceAll('\\n', '').match(new RegExp(`create table [\\`\"']?.*?[\\`\"']? \\\\((.*)\\\\)`, 'i'))?.[1];\n+\n+ /* istanbul ignore else */\n+ if (columnsDef) {\n+ for (let i = cols.length - 1; i >= 0; i--) {\n+ const col = cols[i];\n+ const re = ` *, *[\\`\"']?${col.name}[\\`\"']? (.*)`;\n+ const columnDef = columnsDef.match(new RegExp(re, 'i'));\n+\n+ /* istanbul ignore else */\n+ if (columnDef) {\n+ columns[col.name] = { name: col.name, definition: columnDef[1] };\n+ columnsDef = columnsDef.substring(0, columnDef.index);\n+ }\n+ }\n+ }\n+\n+ return columns;\n+ }\n+\n override async getColumns(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<any[]> {\n- const columns = await connection.execute<any[]>(`pragma table_info('${tableName}')`);\n+ const columns = await connection.execute<any[]>(`pragma table_xinfo('${tableName}')`);\n const sql = `select sql from sqlite_master where type = ? and name = ?`;\n const tableDefinition = await connection.execute<{ sql: string }>(sql, ['table', tableName], 'get');\n const composite = columns.reduce((count, col) => count + (col.pk ? 1 : 0), 0) > 1;\n // there can be only one, so naive check like this should be enough\n const hasAutoincrement = tableDefinition.sql.toLowerCase().includes('autoincrement');\n+ const columnDefinitions = this.parseTableDefinition(tableDefinition.sql, columns);\n \n return columns.map(col => {\n const mappedType = connection.getPlatform().getMappedType(col.type);\n+ let generated: string | undefined;\n+\n+ if (col.hidden > 1) {\n+ const storage = col.hidden === 2 ? 'virtual' : 'stored';\n+ const re = `(generated always)? 
as \\\\((.*)\\\\)( ${storage})?$`;\n+ const match = columnDefinitions[col.name].definition.match(re);\n+\n+ if (match) {\n+ generated = `${match[2]} ${storage}`;\n+ }\n+ }\n+\n return {\n name: col.name,\n type: col.type,\n@@ -39,6 +76,7 @@ export class BetterSqliteSchemaHelper extends SchemaHelper {\n mappedType,\n unsigned: false,\n autoincrement: !composite && col.pk && this.platform.isNumericColumn(mappedType) && hasAutoincrement,\n+ generated,\n };\n });\n }\n@@ -62,7 +100,7 @@ export class BetterSqliteSchemaHelper extends SchemaHelper {\n }, {} as Dictionary<string[]>);\n }\n \n- override async getPrimaryKeys(connection: AbstractSqlConnection, indexes: IndexDef[] = [], tableName: string, schemaName?: string): Promise<string[]> {\n+ override async getPrimaryKeys(connection: AbstractSqlConnection, indexes: IndexDef[], tableName: string, schemaName?: string): Promise<string[]> {\n const sql = `pragma table_info(\\`${tableName}\\`)`;\n const cols = await connection.execute<{ pk: number; name: string }[]>(sql);\n \n@@ -79,8 +117,8 @@ export class BetterSqliteSchemaHelper extends SchemaHelper {\n ret.push({\n columnNames: [col.name],\n keyName: 'primary',\n- unique: true,\n constraint: true,\n+ unique: true,\n primary: true,\n });\n }\n", "Property.ts": "@@ -6,6 +6,7 @@ import type {\n EntityProperty,\n Constructor,\n CheckCallback,\n+ GeneratedColumnCallback,\n AnyString,\n AnyEntity,\n EntityKey,\n@@ -119,9 +120,13 @@ export type PropertyOptions<Owner> = {\n /**\n * Set to map some SQL snippet for the entity.\n *\n- * @see https://mikro-orm.io/docs/defining-entities#formulas Formulas}\n+ * @see https://mikro-orm.io/docs/defining-entities#formulas Formulas\n */\n formula?: string | ((alias: string) => string);\n+ /**\n+ * For generated columns. This will be appended to the column type after the `generated always` clause.\n+ */\n+ generated?: string | GeneratedColumnCallback<Owner>;\n /**\n * Set column as nullable for {@link https://mikro-orm.io/docs/schema-generator Schema Generator}.\n */\n", "EntityValidator.ts": "@@ -49,6 +49,7 @@ export class EntityValidator {\n !prop.default &&\n !prop.defaultRaw &&\n !prop.onCreate &&\n+ !prop.generated &&\n !prop.embedded &&\n ![ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(prop.kind) &&\n prop.name !== wrapped.__meta.root.discriminatorColumn &&\n", "MetadataDiscovery.ts": "@@ -1,7 +1,15 @@\n import { basename, extname } from 'path';\n import globby from 'globby';\n \n-import { EntityMetadata, type AnyEntity, type Constructor, type Dictionary, type EntityClass, type EntityClassGroup, type EntityProperty } from '../typings';\n+import {\n+ type AnyEntity,\n+ type Constructor,\n+ type Dictionary,\n+ type EntityClass,\n+ type EntityClassGroup,\n+ EntityMetadata,\n+ type EntityProperty,\n+} from '../typings';\n import { Utils } from '../utils/Utils';\n import type { Configuration } from '../utils/Configuration';\n import { MetadataValidator } from './MetadataValidator';\n@@ -10,7 +18,7 @@ import type { NamingStrategy } from '../naming-strategy/NamingStrategy';\n import type { SyncCacheAdapter } from '../cache/CacheAdapter';\n import { MetadataStorage } from './MetadataStorage';\n import { EntitySchema } from './EntitySchema';\n-import { Cascade, ReferenceKind, type EventType } from '../enums';\n+import { Cascade, type EventType, ReferenceKind } from '../enums';\n import { MetadataError } from '../errors';\n import type { Platform } from '../platforms';\n import { ArrayType, BigIntType, BlobType, EnumArrayType, JsonType, t, Type, 
Uint8ArrayType } from '../types';\n@@ -112,6 +120,7 @@ export class MetadataDiscovery {\n filtered.forEach(meta => Object.values(meta.properties).forEach(prop => this.initFieldName(prop)));\n filtered.forEach(meta => Object.values(meta.properties).forEach(prop => this.initVersionProperty(meta, prop)));\n filtered.forEach(meta => Object.values(meta.properties).forEach(prop => this.initCustomType(meta, prop)));\n+ filtered.forEach(meta => Object.values(meta.properties).forEach(prop => this.initGeneratedColumn(meta, prop)));\n filtered.forEach(meta => this.initAutoincrement(meta)); // once again after we init custom types\n filtered.forEach(meta => this.initCheckConstraints(meta));\n \n@@ -1050,13 +1059,7 @@ export class MetadataDiscovery {\n }\n \n private initCheckConstraints(meta: EntityMetadata): void {\n- const map = Object.values(meta.properties).reduce((o, prop) => {\n- if (prop.fieldNames) {\n- o[prop.name] = prop.fieldNames[0];\n- }\n-\n- return o;\n- }, {} as Dictionary);\n+ const map = this.createColumnMappingObject(meta);\n \n for (const check of meta.checks) {\n const columns = check.property ? meta.properties[check.property].fieldNames : [];\n@@ -1068,6 +1071,35 @@ export class MetadataDiscovery {\n }\n }\n \n+ private initGeneratedColumn(meta: EntityMetadata, prop: EntityProperty): void {\n+ if (!prop.generated && prop.columnTypes) {\n+ const match = prop.columnTypes[0].match(/(.*) generated always as (.*)/);\n+\n+ if (match) {\n+ prop.columnTypes[0] = match[1];\n+ prop.generated = match[2];\n+ }\n+\n+ return;\n+ }\n+\n+ const map = this.createColumnMappingObject(meta);\n+\n+ if (prop.generated instanceof Function) {\n+ prop.generated = prop.generated(map);\n+ }\n+ }\n+\n+ private createColumnMappingObject(meta: EntityMetadata<any>) {\n+ return Object.values(meta.properties).reduce((o, prop) => {\n+ if (prop.fieldNames) {\n+ o[prop.name] = prop.fieldNames[0];\n+ }\n+\n+ return o;\n+ }, {} as Dictionary);\n+ }\n+\n private getDefaultVersionValue(prop: EntityProperty): string {\n if (typeof prop.defaultRaw !== 'undefined') {\n return prop.defaultRaw;\n", "typings.ts": "@@ -48,6 +48,7 @@ export interface Column {\n scale?: number;\n default?: string | null;\n comment?: string;\n+ generated?: string;\n nativeEnumName?: string;\n enumItems?: string[];\n primary?: boolean;\n", "ChangeSetPersister.ts": "@@ -344,7 +344,7 @@ export class ChangeSetPersister {\n if (changeSets[0].type === ChangeSetType.CREATE) {\n // do not reload things that already had a runtime value\n meta.props\n- .filter(prop => prop.persist !== false && ((prop.primary && prop.autoincrement) || prop.defaultRaw))\n+ .filter(prop => prop.persist !== false && (prop.autoincrement || prop.generated || prop.defaultRaw))\n .filter(prop => (changeSets[0].entity[prop.name] == null && prop.defaultRaw !== 'null') || Utils.isRawSql(changeSets[0].entity[prop.name]))\n .forEach(prop => reloadProps.push(prop));\n }\n@@ -358,7 +358,13 @@ export class ChangeSetPersister {\n }\n });\n });\n- reloadProps.push(...returning);\n+ // reload generated columns\n+ if (!this.platform.usesReturningStatement()) {\n+ meta.props\n+ .filter(prop => prop.generated && !prop.primary)\n+ .forEach(prop => reloadProps.push(prop));\n+ reloadProps.push(...returning);\n+ }\n }\n \n if (reloadProps.length === 0) {\n", "AbstractSqlDriver.ts": "@@ -455,7 +455,7 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection\n \n if (meta && this.platform.usesReturningStatement()) {\n const returningProps = meta.props\n- .filter(prop => 
prop.persist !== false && ((prop.primary && prop.autoincrement) || prop.defaultRaw || prop.autoincrement))\n+ .filter(prop => prop.persist !== false && prop.defaultRaw || prop.autoincrement || prop.generated)\n .filter(prop => !(prop.name in data[0]) || Utils.isRawSql(data[0][prop.name]));\n const returningFields = Utils.flatten(returningProps.map(prop => prop.fieldNames));\n /* istanbul ignore next */\n@@ -516,6 +516,14 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection\n }\n } else {\n qb.update(data).where(where);\n+\n+ // reload generated columns\n+ const returning: string[] = [];\n+ meta?.props\n+ .filter(prop => prop.generated && !prop.primary)\n+ .forEach(prop => returning.push(prop.name));\n+\n+ qb.returning(returning);\n }\n \n res = await this.rethrow(qb.execute('run', false));\n@@ -566,6 +574,11 @@ export abstract class AbstractSqlDriver<Connection extends AbstractSqlConnection\n });\n });\n \n+ // reload generated columns\n+ meta?.props\n+ .filter(prop => prop.generated && !prop.primary)\n+ .forEach(prop => returning.add(prop.name));\n+\n const pkCond = Utils.flatten(meta.primaryKeys.map(pk => meta.properties[pk].fieldNames)).map(pk => `${this.platform.quoteIdentifier(pk)} = ?`).join(' and ');\n const params: any[] = [];\n let sql = `update ${this.getTableName(meta, options)} set `;\n", "DatabaseTable.ts": "@@ -96,6 +96,7 @@ export class DatabaseTable {\n this.columns[field] = {\n name: prop.fieldNames[idx],\n type: prop.columnTypes[idx],\n+ generated: prop.generated as string,\n mappedType,\n unsigned: prop.unsigned && this.platform.isNumericColumn(mappedType),\n autoincrement: prop.autoincrement ?? primary,\n", "SchemaComparator.ts": "@@ -10,7 +10,7 @@ import {\n type EntityProperty,\n type Logger,\n } from '@mikro-orm/core';\n-import type { CheckDef, Column, ForeignKey, IndexDef, SchemaDifference, TableDifference } from '../typings';\n+import type { Column, ForeignKey, IndexDef, SchemaDifference, TableDifference } from '../typings';\n import type { DatabaseSchema } from './DatabaseSchema';\n import type { DatabaseTable } from './DatabaseTable';\n import type { AbstractSqlPlatform } from '../AbstractSqlPlatform';\n@@ -183,6 +183,13 @@ export class SchemaComparator {\n continue;\n }\n \n+ if (changedProperties.size === 1 && changedProperties.has('generated')) {\n+ tableDifferences.addedColumns[column.name] = toTable.getColumn(column.name)!;\n+ tableDifferences.removedColumns[column.name] = column;\n+ changes++;\n+ continue;\n+ }\n+\n tableDifferences.changedColumns[column.name] = {\n oldColumnName: column.name,\n fromColumn: column,\n@@ -258,7 +265,7 @@ export class SchemaComparator {\n // See if index has changed in \"to\" table\n const toTableCheck = toTable.getCheck(check.name)!;\n \n- if (!this.diffCheck(check, toTableCheck)) {\n+ if (!this.diffExpression(check.expression as string, toTableCheck.expression as string)) {\n continue;\n }\n \n@@ -407,12 +414,12 @@ export class SchemaComparator {\n * Returns the difference between the columns\n * If there are differences this method returns field2, otherwise the boolean false.\n */\n- diffColumn(column1: Column, column2: Column, tableName?: string): Set<string> {\n+ diffColumn(fromColumn: Column, toColumn: Column, tableName?: string): Set<string> {\n const changedProperties = new Set<string>();\n- const prop1 = this.mapColumnToProperty({ ...column1, autoincrement: false });\n- const prop2 = this.mapColumnToProperty({ ...column2, autoincrement: false });\n- const columnType1 = 
column1.mappedType.getColumnType(prop1, this.platform).toLowerCase();\n- const columnType2 = column2.mappedType.getColumnType(prop2, this.platform).toLowerCase();\n+ const fromProp = this.mapColumnToProperty({ ...fromColumn, autoincrement: false });\n+ const toProp = this.mapColumnToProperty({ ...toColumn, autoincrement: false });\n+ const fromColumnType = fromColumn.mappedType.getColumnType(fromProp, this.platform).toLowerCase();\n+ const toColumnType = toColumn.mappedType.getColumnType(toProp, this.platform).toLowerCase();\n const log = (msg: string, params: Dictionary) => {\n if (tableName) {\n const copy = Utils.copy(params);\n@@ -422,58 +429,63 @@ export class SchemaComparator {\n };\n \n if (\n- columnType1 !== columnType2 &&\n+ fromColumnType !== toColumnType &&\n !(\n- column1.ignoreSchemaChanges?.includes('type') ||\n- column2.ignoreSchemaChanges?.includes('type')\n+ fromColumn.ignoreSchemaChanges?.includes('type') ||\n+ toColumn.ignoreSchemaChanges?.includes('type')\n )\n ) {\n- log(`'type' changed for column ${tableName}.${column1.name}`, { columnType1, columnType2 });\n+ log(`'type' changed for column ${tableName}.${fromColumn.name}`, { fromColumnType, toColumnType });\n changedProperties.add('type');\n }\n \n- if (column1.nullable !== column2.nullable) {\n- log(`'nullable' changed for column ${tableName}.${column1.name}`, { column1, column2 });\n+ if (fromColumn.nullable !== toColumn.nullable && !fromColumn.generated && !toColumn.generated) {\n+ log(`'nullable' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n changedProperties.add('nullable');\n }\n \n- if (!!column1.autoincrement !== !!column2.autoincrement) {\n- log(`'autoincrement' changed for column ${tableName}.${column1.name}`, { column1, column2 });\n+ if (this.diffExpression(fromColumn.generated as string, toColumn.generated as string)) {\n+ log(`'generated' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n+ changedProperties.add('generated');\n+ }\n+\n+ if (!!fromColumn.autoincrement !== !!toColumn.autoincrement) {\n+ log(`'autoincrement' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n changedProperties.add('autoincrement');\n }\n \n- if (column1.unsigned !== column2.unsigned && this.platform.supportsUnsigned()) {\n- log(`'unsigned' changed for column ${tableName}.${column1.name}`, { column1, column2 });\n+ if (fromColumn.unsigned !== toColumn.unsigned && this.platform.supportsUnsigned()) {\n+ log(`'unsigned' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n changedProperties.add('unsigned');\n }\n \n- if (!this.hasSameDefaultValue(column1, column2)) {\n- log(`'default' changed for column ${tableName}.${column1.name}`, { column1, column2 });\n+ if (!this.hasSameDefaultValue(fromColumn, toColumn)) {\n+ log(`'default' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n changedProperties.add('default');\n }\n \n- if (this.diffComment(column1.comment, column2.comment)) {\n- log(`'comment' changed for column ${tableName}.${column1.name}`, { column1, column2 });\n+ if (this.diffComment(fromColumn.comment, toColumn.comment)) {\n+ log(`'comment' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n changedProperties.add('comment');\n }\n \n if (\n- !(column1.mappedType instanceof ArrayType) &&\n- !(column2.mappedType instanceof ArrayType) &&\n- this.diffEnumItems(column1.enumItems, column2.enumItems)\n+ !(fromColumn.mappedType instanceof 
ArrayType) &&\n+ !(toColumn.mappedType instanceof ArrayType) &&\n+ this.diffEnumItems(fromColumn.enumItems, toColumn.enumItems)\n ) {\n- log(`'enumItems' changed for column ${tableName}.${column1.name}`, { column1, column2 });\n+ log(`'enumItems' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n changedProperties.add('enumItems');\n }\n \n if (\n- (column1.extra || '').toLowerCase() !== (column2.extra || '').toLowerCase() &&\n+ (fromColumn.extra || '').toLowerCase() !== (toColumn.extra || '').toLowerCase() &&\n !(\n- column1.ignoreSchemaChanges?.includes('extra') ||\n- column2.ignoreSchemaChanges?.includes('extra')\n+ fromColumn.ignoreSchemaChanges?.includes('extra') ||\n+ toColumn.ignoreSchemaChanges?.includes('extra')\n )\n ) {\n- log(`'extra' changed for column ${tableName}.${column1.name}`, { column1, column2 });\n+ log(`'extra' changed for column ${tableName}.${fromColumn.name}`, { fromColumn, toColumn });\n changedProperties.add('extra');\n }\n \n@@ -540,11 +552,11 @@ export class SchemaComparator {\n return index1.primary === index2.primary && index1.unique === index2.unique;\n }\n \n- diffCheck(check1: CheckDef, check2: CheckDef): boolean {\n- // check constraint definition might be normalized by the driver,\n+ diffExpression(expr1: string, expr2: string): boolean {\n+ // expressions like check constraints might be normalized by the driver,\n // e.g. quotes might be added (https://github.com/mikro-orm/mikro-orm/issues/3827)\n- const simplify = (str?: string) => str?.replace(/['\"`()]/g, '').toLowerCase();\n- return simplify(check1.expression as string) !== simplify(check2.expression as string);\n+ const simplify = (str?: string) => str?.replace(/_\\w+\\\\'(.*?)\\\\'/g, '$1').replace(/['\"`()]|::\\w+| +/g, '').toLowerCase();\n+ return simplify(expr1) !== simplify(expr2);\n }\n \n hasSameDefaultValue(from: Column, to: Column): boolean {\n", "SchemaHelper.ts": "@@ -135,7 +135,7 @@ export abstract class SchemaHelper {\n createTableColumn(table: Knex.TableBuilder, column: Column, fromTable: DatabaseTable, changedProperties?: Set<string>) {\n const compositePK = fromTable.getPrimaryKey()?.composite;\n \n- if (column.autoincrement && !compositePK && (!changedProperties || changedProperties.has('autoincrement') || changedProperties.has('type'))) {\n+ if (column.autoincrement && !column.generated && !compositePK && (!changedProperties || changedProperties.has('autoincrement') || changedProperties.has('type'))) {\n const primaryKey = !changedProperties && !this.hasNonDefaultPrimaryKeyName(fromTable);\n \n if (column.mappedType instanceof BigIntType) {\n@@ -149,14 +149,20 @@ export abstract class SchemaHelper {\n return table.enum(column.name, column.enumItems);\n }\n \n- return table.specificType(column.name, column.type);\n+ let columnType = column.type;\n+\n+ if (column.generated) {\n+ columnType += ` generated always as ${column.generated}`;\n+ }\n+\n+ return table.specificType(column.name, columnType);\n }\n \n configureColumn(column: Column, col: Knex.ColumnBuilder, knex: Knex, changedProperties?: Set<string>) {\n const guard = (key: string) => !changedProperties || changedProperties.has(key);\n \n Utils.runIfNotEmpty(() => col.nullable(), column.nullable && guard('nullable'));\n- Utils.runIfNotEmpty(() => col.notNullable(), !column.nullable);\n+ Utils.runIfNotEmpty(() => col.notNullable(), !column.nullable && !column.generated);\n Utils.runIfNotEmpty(() => col.unsigned(), column.unsigned);\n Utils.runIfNotEmpty(() => col.comment(column.comment!), 
column.comment);\n this.configureColumnDefault(column, col, knex, changedProperties);\n", "SqlSchemaGenerator.ts": "@@ -353,6 +353,15 @@ export class SqlSchemaGenerator extends AbstractSchemaGenerator<AbstractSqlDrive\n this.dropCheck(table, check);\n }\n \n+ /* istanbul ignore else */\n+ if (!safe) {\n+ for (const column of Object.values(diff.removedColumns)) {\n+ this.helper.pushTableQuery(table, `alter table ${this.platform.quoteIdentifier(tableName)} drop column ${this.platform.quoteIdentifier(column.name)}`);\n+ }\n+ }\n+ }));\n+\n+ ret.push(this.createSchemaBuilder(schemaName).alterTable(tableName, table => {\n for (const column of Object.values(diff.addedColumns)) {\n const col = this.helper.createTableColumn(table, column, diff.fromTable);\n this.helper.configureColumn(column, col, this.knex);\n@@ -368,13 +377,6 @@ export class SqlSchemaGenerator extends AbstractSchemaGenerator<AbstractSqlDrive\n }\n }\n \n- /* istanbul ignore else */\n- if (!safe) {\n- for (const column of Object.values(diff.removedColumns)) {\n- table.dropColumn(column.name);\n- }\n- }\n-\n for (const { column, changedProperties } of Object.values(diff.changedColumns)) {\n if (changedProperties.size === 1 && changedProperties.has('comment')) {\n continue;\n", "MariaDbSchemaHelper.ts": "@@ -91,6 +91,7 @@ export class MariaDbSchemaHelper extends SchemaHelper {\n column_type as column_type,\n column_key as column_key,\n extra as extra,\n+ generation_expression as generation_expression,\n numeric_precision as numeric_precision,\n numeric_scale as numeric_scale,\n ifnull(datetime_precision, character_maximum_length) length\n@@ -98,15 +99,15 @@ export class MariaDbSchemaHelper extends SchemaHelper {\n order by ordinal_position`;\n const allColumns = await connection.execute<any[]>(sql);\n const str = (val?: string | number | null) => val != null ? '' + val : val;\n- const extra = (val: string) => val.replace(/auto_increment|default_generated/i, '').trim();\n+ const extra = (val: string) => val.replace(/auto_increment|default_generated|(stored|virtual) generated/i, '').trim();\n const ret = {} as Dictionary;\n \n-\n for (const col of allColumns) {\n const mappedType = this.platform.getMappedType(col.column_type);\n const tmp = this.normalizeDefaultValue(col.column_default, col.length);\n const defaultValue = str(tmp === 'NULL' && col.is_nullable === 'YES' ? null : tmp);\n const key = this.getTableKey(col);\n+ const generated = col.generation_expression ? `${col.generation_expression} ${col.extra.match(/stored generated/i) ? 'stored' : 'virtual'}` : undefined;\n ret[key] ??= [];\n ret[key].push({\n name: col.column_name,\n@@ -123,6 +124,7 @@ export class MariaDbSchemaHelper extends SchemaHelper {\n scale: col.numeric_scale,\n comment: col.column_comment,\n extra: extra(col.extra),\n+ generated,\n });\n }\n \n", "MySqlSchemaHelper.ts": "@@ -92,6 +92,7 @@ export class MySqlSchemaHelper extends SchemaHelper {\n column_type as column_type,\n column_key as column_key,\n extra as extra,\n+ generation_expression as generation_expression,\n numeric_precision as numeric_precision,\n numeric_scale as numeric_scale,\n ifnull(datetime_precision, character_maximum_length) length\n@@ -99,13 +100,14 @@ export class MySqlSchemaHelper extends SchemaHelper {\n order by ordinal_position`;\n const allColumns = await connection.execute<any[]>(sql);\n const str = (val?: string | number) => val != null ? 
'' + val : val;\n- const extra = (val: string) => val.replace(/auto_increment|default_generated/i, '').trim();\n+ const extra = (val: string) => val.replace(/auto_increment|default_generated|(stored|virtual) generated/i, '').trim();\n const ret = {} as Dictionary;\n \n for (const col of allColumns) {\n const mappedType = this.platform.getMappedType(col.column_type);\n const defaultValue = str(this.normalizeDefaultValue(col.column_default, col.length));\n const key = this.getTableKey(col);\n+ const generated = col.generation_expression ? `${col.generation_expression} ${col.extra.match(/stored generated/i) ? 'stored' : 'virtual'}` : undefined;\n ret[key] ??= [];\n ret[key].push({\n name: col.column_name,\n@@ -122,6 +124,7 @@ export class MySqlSchemaHelper extends SchemaHelper {\n scale: col.numeric_scale,\n comment: col.column_comment,\n extra: extra(col.extra),\n+ generated,\n });\n }\n \n", "PostgreSqlPlatform.ts": "@@ -52,8 +52,8 @@ export class PostgreSqlPlatform extends AbstractSqlPlatform {\n return 'time(0)';\n }\n \n- override getIntegerTypeDeclarationSQL(column: { length?: number; autoincrement?: boolean }): string {\n- if (column.autoincrement) {\n+ override getIntegerTypeDeclarationSQL(column: { length?: number; autoincrement?: boolean; generated?: string }): string {\n+ if (column.autoincrement && !column.generated) {\n return `serial`;\n }\n \n", "PostgreSqlSchemaHelper.ts": "@@ -109,6 +109,9 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {\n numeric_precision,\n numeric_scale,\n data_type,\n+ is_identity,\n+ identity_generation,\n+ generation_expression,\n (select pg_catalog.col_description(c.oid, cols.ordinal_position::int)\n from pg_catalog.pg_class c\n where c.oid = (select ('\"' || cols.table_schema || '\".\"' || cols.table_name || '\"')::regclass::oid) and c.relname = cols.table_name) as column_comment\n@@ -122,7 +125,7 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {\n \n for (const col of allColumns) {\n const mappedType = connection.getPlatform().getMappedType(col.data_type);\n- const increments = col.column_default?.includes('nextval') && connection.getPlatform().isNumericColumn(mappedType);\n+ const increments = (col.column_default?.includes('nextval') || col.is_identity === 'YES') && connection.getPlatform().isNumericColumn(mappedType);\n const key = this.getTableKey(col);\n ret[key] ??= [];\n const column: Column = {\n@@ -136,6 +139,7 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {\n default: str(this.normalizeDefaultValue(col.column_default, col.length)),\n unsigned: increments,\n autoincrement: increments,\n+ generated: col.is_identity === 'YES' ? (col.identity_generation === 'BY DEFAULT' ? 'by default as identity' : 'identity') : (col.generation_expression ? 
col.generation_expression + ' stored' : undefined),\n comment: col.column_comment,\n };\n \n@@ -281,7 +285,7 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {\n const pk = fromTable.getPrimaryKey();\n const primaryKey = column.primary && !changedProperties && !this.hasNonDefaultPrimaryKeyName(fromTable);\n \n- if (column.autoincrement && !pk?.composite && !changedProperties) {\n+ if (column.autoincrement && !column.generated && !pk?.composite && !changedProperties) {\n if (column.mappedType instanceof BigIntType) {\n return table.bigIncrements(column.name, { primaryKey });\n }\n@@ -308,7 +312,15 @@ export class PostgreSqlSchemaHelper extends SchemaHelper {\n column.type = column.type.replace('serial', 'int');\n }\n \n- return table.specificType(column.name, column.type);\n+ let columnType = column.type;\n+\n+ if (column.generated === 'by default as identity') {\n+ columnType += ` generated ${column.generated}`;\n+ } else if (column.generated) {\n+ columnType += ` generated always as ${column.generated}`;\n+ }\n+\n+ return table.specificType(column.name, columnType);\n }\n \n override configureColumn(column: Column, col: Knex.ColumnBuilder, knex: Knex, changedProperties?: Set<string>) {\n", "SqliteConnection.ts": "@@ -176,7 +176,7 @@ export class SqliteConnection extends AbstractSqlConnection {\n if (obj.method === 'raw') {\n const query = obj.sql.trim().toLowerCase();\n \n- if (query.startsWith('insert into') && query.includes(' returning ')) {\n+ if ((query.startsWith('insert into') || query.startsWith('update ')) && query.includes(' returning ')) {\n return 'all';\n }\n \n", "SqliteSchemaHelper.ts": "@@ -20,16 +20,53 @@ export class SqliteSchemaHelper extends SchemaHelper {\n + `union all select name as table_name from sqlite_temp_master where type = 'table' order by name`;\n }\n \n+ private parseTableDefinition(sql: string, cols: any[]) {\n+ const columns: Dictionary<{ name: string; definition: string }> = {};\n+\n+ // extract all columns definitions\n+ let columnsDef = sql.replaceAll('\\n', '').match(new RegExp(`create table [\\`\"']?.*?[\\`\"']? \\\\((.*)\\\\)`, 'i'))?.[1];\n+\n+ /* istanbul ignore else */\n+ if (columnsDef) {\n+ for (let i = cols.length - 1; i >= 0; i--) {\n+ const col = cols[i];\n+ const re = ` *, *[\\`\"']?${col.name}[\\`\"']? (.*)`;\n+ const columnDef = columnsDef.match(new RegExp(re, 'i'));\n+\n+ /* istanbul ignore else */\n+ if (columnDef) {\n+ columns[col.name] = { name: col.name, definition: columnDef[1] };\n+ columnsDef = columnsDef.substring(0, columnDef.index);\n+ }\n+ }\n+ }\n+\n+ return columns;\n+ }\n+\n override async getColumns(connection: AbstractSqlConnection, tableName: string, schemaName?: string): Promise<any[]> {\n- const columns = await connection.execute<any[]>(`pragma table_info('${tableName}')`);\n+ const columns = await connection.execute<any[]>(`pragma table_xinfo('${tableName}')`);\n const sql = `select sql from sqlite_master where type = ? and name = ?`;\n const tableDefinition = await connection.execute<{ sql: string }>(sql, ['table', tableName], 'get');\n const composite = columns.reduce((count, col) => count + (col.pk ? 
1 : 0), 0) > 1;\n // there can be only one, so naive check like this should be enough\n const hasAutoincrement = tableDefinition.sql.toLowerCase().includes('autoincrement');\n+ const columnDefinitions = this.parseTableDefinition(tableDefinition.sql, columns);\n \n return columns.map(col => {\n const mappedType = connection.getPlatform().getMappedType(col.type);\n+ let generated: string | undefined;\n+\n+ if (col.hidden > 1) {\n+ const storage = col.hidden === 2 ? 'virtual' : 'stored';\n+ const re = `(generated always)? as \\\\((.*)\\\\)( ${storage})?$`;\n+ const match = columnDefinitions[col.name].definition.match(re);\n+\n+ if (match) {\n+ generated = `${match[2]} ${storage}`;\n+ }\n+ }\n+\n return {\n name: col.name,\n type: col.type,\n@@ -39,6 +76,7 @@ export class SqliteSchemaHelper extends SchemaHelper {\n mappedType,\n unsigned: false,\n autoincrement: !composite && col.pk && this.platform.isNumericColumn(mappedType) && hasAutoincrement,\n+ generated,\n };\n });\n }\n", "nested-embeddables.postgres.test.ts.snap": "@@ -145,6 +145,7 @@ drop index \"user_profile2_identity_links_meta_bar_index\";\n drop index \"user_profile2_identity_links_metas_bar_index\";\n drop index \"user_profile2_identity_meta_bar_index\";\n drop index \"user_profile2_username_unique\";\n+\n create index \"user_profile1_identity_links_meta_bar_index\" on \"user\" ((\"profile1_identity_links\"->'meta'->>'bar'));\n create index \"user_profile1_identity_links_metas_bar_index\" on \"user\" ((\"profile1_identity_links\"->'metas'->>'bar'));\n create unique index \"user_profile2_identity_email_unique\" on \"user\" ((\"profile2\"->'identity'->>'email'));\n", "generated-columns.mariadb.test.ts.snap": "@@ -0,0 +1,19 @@\n+// Jest Snapshot v1, https://goo.gl/fbAQLP\n+\n+exports[`schema 1`] = `\n+\"set names utf8mb4;\n+set foreign_key_checks = 0;\n+\n+create table \\`user\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`first_name\\` varchar(50) not null, \\`last_name\\` varchar(50) not null, \\`full_name\\` varchar(100) generated always as (concat(first_name, ' ', last_name)) stored, \\`full_name2\\` varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual) default character set utf8mb4 engine = InnoDB;\n+\n+set foreign_key_checks = 1;\n+\"\n+`;\n+\n+exports[`schema 2`] = `\n+\"alter table \\`user\\` drop column \\`full_name\\`;\n+\n+alter table \\`user\\` add \\`full_name\\` varchar(100) generated always as (concat(last_name, ' ', first_name)) stored;\n+\n+\"\n+`;\n", "generated-columns.mysql.test.ts.snap": "@@ -0,0 +1,19 @@\n+// Jest Snapshot v1, https://goo.gl/fbAQLP\n+\n+exports[`schema 1`] = `\n+\"set names utf8mb4;\n+set foreign_key_checks = 0;\n+\n+create table \\`user\\` (\\`id\\` int unsigned not null auto_increment primary key, \\`first_name\\` varchar(50) not null, \\`last_name\\` varchar(50) not null, \\`full_name\\` varchar(100) generated always as (concat(first_name, ' ', last_name)) stored, \\`full_name2\\` varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual) default character set utf8mb4 engine = InnoDB;\n+\n+set foreign_key_checks = 1;\n+\"\n+`;\n+\n+exports[`schema 2`] = `\n+\"alter table \\`user\\` drop column \\`full_name\\`;\n+\n+alter table \\`user\\` add \\`full_name\\` varchar(100) generated always as (concat(last_name, ' ', first_name)) stored;\n+\n+\"\n+`;\n", "generated-columns.postgres.test.ts.snap": "@@ -0,0 +1,19 @@\n+// Jest Snapshot v1, https://goo.gl/fbAQLP\n+\n+exports[`schema 1`] = `\n+\"set names 'utf8';\n+set 
session_replication_role = 'replica';\n+\n+create table \"user\" (\"id\" int generated by default as identity not null, \"first_name\" varchar(50) not null, \"last_name\" varchar(50) not null, \"full_name\" varchar(100) generated always as (first_name || ' ' || last_name) stored not null, \"full_name2\" varchar(100) generated always as (first_name || ' ' || last_name) stored not null);\n+\n+set session_replication_role = 'origin';\n+\"\n+`;\n+\n+exports[`schema 2`] = `\n+\"alter table \"user\" drop column \"full_name\";\n+\n+alter table \"user\" add column \"full_name\" varchar(100) generated always as (last_name || ' ' || first_name) stored not null;\n+\n+\"\n+`;\n", "generated-columns.sqlite.test.ts.snap": "@@ -0,0 +1,18 @@\n+// Jest Snapshot v1, https://goo.gl/fbAQLP\n+\n+exports[`schema 1`] = `\n+\"pragma foreign_keys = off;\n+\n+create table \\`user\\` (\\`id\\` integer not null primary key autoincrement, \\`first_name\\` text not null, \\`last_name\\` text not null, \\`full_name\\` text generated always as (first_name || ' ' || last_name) virtual, \\`full_name2\\` text generated always as (first_name || ' ' || last_name) virtual);\n+\n+pragma foreign_keys = on;\n+\"\n+`;\n+\n+exports[`schema 2`] = `\n+\"alter table \\`user\\` drop column \\`full_name\\`;\n+\n+alter table \\`user\\` add column \\`full_name\\` text generated always as (last_name || ' ' || first_name) virtual;\n+\n+\"\n+`;\n", "generated-columns.mariadb.test.ts": "@@ -0,0 +1,109 @@\n+import { Entity, MikroORM, Opt, PrimaryKey, Property } from '@mikro-orm/mariadb';\n+\n+@Entity()\n+class User {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(concat(${cols.firstName}, ' ', ${cols.lastName})) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+@Entity({ tableName: 'user' })\n+class User1 {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(concat(${cols.lastName}, ' ', ${cols.firstName})) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+let orm: MikroORM;\n+\n+beforeAll(async () => {\n+ orm = await MikroORM.init({\n+ entities: [User],\n+ dbName: 'generated-columns',\n+ port: 3309,\n+ });\n+\n+ await orm.schema.refreshDatabase();\n+});\n+\n+afterAll(() => orm.close(true));\n+\n+test('validation', async () => {\n+ const user = orm.em.create(User, {\n+ firstName: 'First',\n+ lastName: 'Last',\n+ });\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Last',\n+ fullName: 'First Last',\n+ fullName2: 'First Last',\n+ });\n+\n+ user.lastName = 'Changed';\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+\n+ const u = await orm.em.fork().findOneOrFail(User, 1);\n+\n+ expect(u).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+});\n+\n+\n+test('schema', async () => {\n+ const 
createSQL = await orm.schema.getCreateSchemaSQL();\n+ expect(createSQL).toMatchSnapshot();\n+ const updateSQL = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL).toBe('');\n+\n+ orm.getMetadata().reset('User');\n+ orm.discoverEntity(User1);\n+ const diff1 = await orm.schema.getUpdateSchemaSQL({ wrap: false });\n+ expect(diff1).toMatchSnapshot();\n+ await orm.schema.execute(diff1);\n+\n+ const updateSQL2 = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL2).toBe('');\n+});\n", "generated-columns.mysql.test.ts": "@@ -0,0 +1,109 @@\n+import { Entity, MikroORM, Opt, PrimaryKey, Property } from '@mikro-orm/mysql';\n+\n+@Entity()\n+class User {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(concat(${cols.firstName}, ' ', ${cols.lastName})) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+@Entity({ tableName: 'user' })\n+class User1 {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(concat(${cols.lastName}, ' ', ${cols.firstName})) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `varchar(100) generated always as (concat(first_name, ' ', last_name)) virtual` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+let orm: MikroORM;\n+\n+beforeAll(async () => {\n+ orm = await MikroORM.init({\n+ entities: [User],\n+ dbName: 'generated-columns',\n+ port: 3308,\n+ });\n+\n+ await orm.schema.refreshDatabase();\n+});\n+\n+afterAll(() => orm.close(true));\n+\n+test('validation', async () => {\n+ const user = orm.em.create(User, {\n+ firstName: 'First',\n+ lastName: 'Last',\n+ });\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Last',\n+ fullName: 'First Last',\n+ fullName2: 'First Last',\n+ });\n+\n+ user.lastName = 'Changed';\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+\n+ const u = await orm.em.fork().findOneOrFail(User, 1);\n+\n+ expect(u).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+});\n+\n+\n+test('schema', async () => {\n+ const createSQL = await orm.schema.getCreateSchemaSQL();\n+ expect(createSQL).toMatchSnapshot();\n+ const updateSQL = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL).toBe('');\n+\n+ orm.getMetadata().reset('User');\n+ orm.discoverEntity(User1);\n+ const diff1 = await orm.schema.getUpdateSchemaSQL({ wrap: false });\n+ expect(diff1).toMatchSnapshot();\n+ await orm.schema.execute(diff1);\n+\n+ const updateSQL2 = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL2).toBe('');\n+});\n", "generated-columns.postgres.test.ts": "@@ -0,0 +1,108 @@\n+import { Entity, MikroORM, Opt, PrimaryKey, Property } from '@mikro-orm/postgresql';\n+\n+@Entity()\n+class User {\n+\n+ @PrimaryKey({ generated: 'by default as identity' })\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => 
`(${cols.firstName} || ' ' || ${cols.lastName}) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `varchar(100) generated always as (first_name || ' ' || last_name) stored` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+@Entity({ tableName: 'user' })\n+class User1 {\n+\n+ @PrimaryKey({ generated: 'by default as identity' })\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(${cols.lastName} || ' ' || ${cols.firstName}) stored` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `varchar(100) generated always as (first_name || ' ' || last_name) stored` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+let orm: MikroORM;\n+\n+beforeAll(async () => {\n+ orm = await MikroORM.init({\n+ entities: [User],\n+ dbName: 'generated-columns',\n+ });\n+\n+ await orm.schema.refreshDatabase();\n+});\n+\n+afterAll(() => orm.close(true));\n+\n+test('validation', async () => {\n+ const user = orm.em.create(User, {\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Last',\n+ });\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Last',\n+ fullName: 'First Last',\n+ fullName2: 'First Last',\n+ });\n+\n+ user.lastName = 'Changed';\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+\n+ const u = await orm.em.fork().findOneOrFail(User, 1);\n+\n+ expect(u).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+});\n+\n+test('schema', async () => {\n+ const createSQL = await orm.schema.getCreateSchemaSQL();\n+ expect(createSQL).toMatchSnapshot();\n+ const updateSQL = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL).toBe('');\n+\n+ orm.getMetadata().reset('User');\n+ orm.discoverEntity(User1);\n+ const diff1 = await orm.schema.getUpdateSchemaSQL({ wrap: false });\n+ expect(diff1).toMatchSnapshot();\n+ await orm.schema.execute(diff1);\n+\n+ const updateSQL2 = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL2).toBe('');\n+});\n", "generated-columns.sqlite.test.ts": "@@ -0,0 +1,107 @@\n+import { Entity, MikroORM, Opt, PrimaryKey, Property } from '@mikro-orm/sqlite';\n+\n+@Entity()\n+class User {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(${cols.firstName} || ' ' || ${cols.lastName}) virtual` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `text generated always as (first_name || ' ' || last_name) virtual` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+@Entity({ tableName: 'user' })\n+class User1 {\n+\n+ @PrimaryKey()\n+ id!: number;\n+\n+ @Property({ length: 50 })\n+ firstName!: string;\n+\n+ @Property({ length: 50 })\n+ lastName!: string;\n+\n+ @Property<User>({ length: 100, generated: cols => `(${cols.lastName} || ' ' || ${cols.firstName}) virtual` })\n+ fullName!: string & Opt;\n+\n+ @Property<User>({ columnType: `text generated always as (first_name || ' ' || last_name) virtual` })\n+ fullName2!: string & Opt;\n+\n+}\n+\n+let orm: MikroORM;\n+\n+beforeAll(async () => {\n+ orm = await MikroORM.init({\n+ entities: [User],\n+ dbName: ':memory:',\n+ });\n+\n+ await 
orm.schema.refreshDatabase();\n+});\n+\n+afterAll(() => orm.close(true));\n+\n+test('validation', async () => {\n+ const user = orm.em.create(User, {\n+ firstName: 'First',\n+ lastName: 'Last',\n+ });\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Last',\n+ fullName: 'First Last',\n+ fullName2: 'First Last',\n+ });\n+\n+ user.lastName = 'Changed';\n+ await orm.em.flush();\n+\n+ expect(user).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+\n+ const u = await orm.em.fork().findOneOrFail(User, 1);\n+\n+ expect(u).toEqual({\n+ id: 1,\n+ firstName: 'First',\n+ lastName: 'Changed',\n+ fullName: 'First Changed',\n+ fullName2: 'First Changed',\n+ });\n+});\n+\n+test('schema', async () => {\n+ const createSQL = await orm.schema.getCreateSchemaSQL();\n+ expect(createSQL).toMatchSnapshot();\n+ const updateSQL = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL).toBe('');\n+\n+ orm.getMetadata().reset('User');\n+ orm.discoverEntity(User1);\n+ const diff1 = await orm.schema.getUpdateSchemaSQL({ wrap: false });\n+ expect(diff1).toMatchSnapshot();\n+ await orm.schema.execute(diff1);\n+\n+ const updateSQL2 = await orm.schema.getUpdateSchemaSQL();\n+ expect(updateSQL2).toBe('');\n+});\n", "Migrator.postgres.test.ts.snap": "@@ -385,10 +385,11 @@ const { Migration } = require('@mikro-orm/migrations');\n class Migration20191013214813 extends Migration {\n \n async up() {\n+ this.addSql('alter table \"book2\" drop column \"foo\";');\n+\n this.addSql('alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);');\n- this.addSql('alter table \"custom\".\"book2\" drop column \"foo\";');\n \n- this.addSql('alter table \"custom\".\"test2\" drop column \"path\";');\n+ this.addSql('alter table \"test2\" drop column \"path\";');\n }\n \n async down() {\n@@ -409,10 +410,11 @@ exports.Migration20191013214813 = Migration20191013214813;\n \"alter table \"custom\".\"test2\" add column \"path\" polygon null;\",\n ],\n \"up\": [\n+ \"alter table \"book2\" drop column \"foo\";\",\n+ \"\",\n \"alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);\",\n- \"alter table \"custom\".\"book2\" drop column \"foo\";\",\n \"\",\n- \"alter table \"custom\".\"test2\" drop column \"path\";\",\n+ \"alter table \"test2\" drop column \"path\";\",\n ],\n },\n \"fileName\": \"Migration20191013214813.js\",\n@@ -428,10 +430,11 @@ import { Migration } from '@mikro-orm/migrations';\n export class Migration20191013214813 extends Migration {\n \n async up(): Promise<void> {\n+ this.addSql('alter table \"book2\" drop column \"foo\";');\n+\n this.addSql('alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);');\n- this.addSql('alter table \"custom\".\"book2\" drop column \"foo\";');\n \n- this.addSql('alter table \"custom\".\"test2\" drop column \"path\";');\n+ this.addSql('alter table \"test2\" drop column \"path\";');\n }\n \n async down(): Promise<void> {\n@@ -451,10 +454,11 @@ export class Migration20191013214813 extends Migration {\n \"alter table \"custom\".\"test2\" add column \"path\" polygon null;\",\n ],\n \"up\": [\n+ \"alter table \"book2\" drop column \"foo\";\",\n+ \"\",\n \"alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);\",\n- \"alter table 
\"custom\".\"book2\" drop column \"foo\";\",\n \"\",\n- \"alter table \"custom\".\"test2\" drop column \"path\";\",\n+ \"alter table \"test2\" drop column \"path\";\",\n ],\n },\n \"fileName\": \"Migration20191013214813.ts\",\n@@ -468,10 +472,11 @@ exports[`Migrator (postgres) generate migration with custom name with name optio\n export class Migration20191013214813_custom_name extends Migration {\n \n async up(): Promise<void> {\n+ this.addSql('alter table \"book2\" drop column \"foo\";');\n+\n this.addSql('alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);');\n- this.addSql('alter table \"custom\".\"book2\" drop column \"foo\";');\n \n- this.addSql('alter table \"custom\".\"test2\" drop column \"path\";');\n+ this.addSql('alter table \"test2\" drop column \"path\";');\n }\n \n async down(): Promise<void> {\n@@ -491,10 +496,11 @@ export class Migration20191013214813_custom_name extends Migration {\n \"alter table \"custom\".\"test2\" add column \"path\" polygon null;\",\n ],\n \"up\": [\n+ \"alter table \"book2\" drop column \"foo\";\",\n+ \"\",\n \"alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);\",\n- \"alter table \"custom\".\"book2\" drop column \"foo\";\",\n \"\",\n- \"alter table \"custom\".\"test2\" drop column \"path\";\",\n+ \"alter table \"test2\" drop column \"path\";\",\n ],\n },\n \"fileName\": \"migration20191013214813_custom_name.ts\",\n@@ -508,10 +514,11 @@ exports[`Migrator (postgres) generate migration with custom name: migration-dump\n export class Migration20191013214813 extends Migration {\n \n async up(): Promise<void> {\n+ this.addSql('alter table \"book2\" drop column \"foo\";');\n+\n this.addSql('alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);');\n- this.addSql('alter table \"custom\".\"book2\" drop column \"foo\";');\n \n- this.addSql('alter table \"custom\".\"test2\" drop column \"path\";');\n+ this.addSql('alter table \"test2\" drop column \"path\";');\n }\n \n async down(): Promise<void> {\n@@ -531,10 +538,11 @@ export class Migration20191013214813 extends Migration {\n \"alter table \"custom\".\"test2\" add column \"path\" polygon null;\",\n ],\n \"up\": [\n+ \"alter table \"book2\" drop column \"foo\";\",\n+ \"\",\n \"alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);\",\n- \"alter table \"custom\".\"book2\" drop column \"foo\";\",\n \"\",\n- \"alter table \"custom\".\"test2\" drop column \"path\";\",\n+ \"alter table \"test2\" drop column \"path\";\",\n ],\n },\n \"fileName\": \"migration-20191013214813.ts\",\n@@ -548,10 +556,11 @@ exports[`Migrator (postgres) generate migration with snapshot: migration-snapsho\n export class Migration20191013214813 extends Migration {\n \n async up(): Promise<void> {\n+ this.addSql('alter table \"book2\" drop column \"foo\";');\n+\n this.addSql('alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);');\n- this.addSql('alter table \"custom\".\"book2\" drop column \"foo\";');\n \n- this.addSql('alter table \"custom\".\"test2\" drop column \"path\";');\n+ this.addSql('alter table \"test2\" drop column \"path\";');\n }\n \n async down(): Promise<void> {\n@@ -571,10 +580,11 @@ export class Migration20191013214813 extends Migration {\n \"alter table \"custom\".\"test2\" add column \"path\" polygon 
null;\",\n ],\n \"up\": [\n+ \"alter table \"book2\" drop column \"foo\";\",\n+ \"\",\n \"alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);\",\n- \"alter table \"custom\".\"book2\" drop column \"foo\";\",\n \"\",\n- \"alter table \"custom\".\"test2\" drop column \"path\";\",\n+ \"alter table \"test2\" drop column \"path\";\",\n ],\n },\n \"fileName\": \"Migration20191013214813.ts\",\n@@ -599,10 +609,11 @@ exports[`Migrator (postgres) generate schema migration: migration-dump 1`] = `\n export class Migration20191013214813 extends Migration {\n \n async up(): Promise<void> {\n+ this.addSql('alter table \"book2\" drop column \"foo\";');\n+\n this.addSql('alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);');\n- this.addSql('alter table \"custom\".\"book2\" drop column \"foo\";');\n \n- this.addSql('alter table \"custom\".\"test2\" drop column \"path\";');\n+ this.addSql('alter table \"test2\" drop column \"path\";');\n }\n \n async down(): Promise<void> {\n@@ -622,10 +633,11 @@ export class Migration20191013214813 extends Migration {\n \"alter table \"custom\".\"test2\" add column \"path\" polygon null;\",\n ],\n \"up\": [\n+ \"alter table \"book2\" drop column \"foo\";\",\n+ \"\",\n \"alter table \"custom\".\"book2\" alter column \"double\" type double precision using (\"double\"::double precision);\",\n- \"alter table \"custom\".\"book2\" drop column \"foo\";\",\n \"\",\n- \"alter table \"custom\".\"test2\" drop column \"path\";\",\n+ \"alter table \"test2\" drop column \"path\";\",\n ],\n },\n \"fileName\": \"Migration20191013214813.ts\",\n", "Migrator.test.ts.snap": "@@ -339,11 +339,11 @@ class Migration20191013214813 extends Migration {\n async up() {\n this.addSql('alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;');\n \n- this.addSql('alter table \\`book2\\` drop \\`foo\\`;');\n+ this.addSql('alter table \\`book2\\` drop column \\`foo\\`;');\n \n this.addSql('alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;');\n- this.addSql('alter table \\`test2\\` drop \\`foo___bar\\`;');\n- this.addSql('alter table \\`test2\\` drop \\`foo___baz\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___bar\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___baz\\`;');\n }\n \n async down() {\n@@ -368,11 +368,11 @@ exports.Migration20191013214813 = Migration20191013214813;\n \"up\": [\n \"alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\",\n \"\",\n- \"alter table \\`book2\\` drop \\`foo\\`;\",\n+ \"alter table \\`book2\\` drop column \\`foo\\`;\",\n \"\",\n \"alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___bar\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___baz\\`;\",\n+ \"alter table \\`test2\\` drop column \\`foo___bar\\`;\",\n+ \"alter table \\`test2\\` drop column \\`foo___baz\\`;\",\n ],\n },\n \"fileName\": \"Migration20191013214813.cjs\",\n@@ -390,11 +390,11 @@ class Migration20191013214813 extends Migration {\n async up() {\n this.addSql('alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;');\n \n- this.addSql('alter table \\`book2\\` drop \\`foo\\`;');\n+ this.addSql('alter table \\`book2\\` drop column \\`foo\\`;');\n \n this.addSql('alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;');\n- this.addSql('alter table \\`test2\\` drop \\`foo___bar\\`;');\n- this.addSql('alter table 
\\`test2\\` drop \\`foo___baz\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___bar\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___baz\\`;');\n }\n \n async down() {\n@@ -419,11 +419,11 @@ exports.Migration20191013214813 = Migration20191013214813;\n \"up\": [\n \"alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\",\n \"\",\n- \"alter table \\`book2\\` drop \\`foo\\`;\",\n+ \"alter table \\`book2\\` drop column \\`foo\\`;\",\n \"\",\n \"alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___bar\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___baz\\`;\",\n+ \"alter table \\`test2\\` drop column \\`foo___bar\\`;\",\n+ \"alter table \\`test2\\` drop column \\`foo___baz\\`;\",\n ],\n },\n \"fileName\": \"Migration20191013214813.js\",\n@@ -439,11 +439,11 @@ export class Migration20191013214813 extends Migration {\n async up(): Promise<void> {\n this.addSql('alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;');\n \n- this.addSql('alter table \\`book2\\` drop \\`foo\\`;');\n+ this.addSql('alter table \\`book2\\` drop column \\`foo\\`;');\n \n this.addSql('alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;');\n- this.addSql('alter table \\`test2\\` drop \\`foo___bar\\`;');\n- this.addSql('alter table \\`test2\\` drop \\`foo___baz\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___bar\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___baz\\`;');\n }\n \n async down(): Promise<void> {\n@@ -467,11 +467,11 @@ export class Migration20191013214813 extends Migration {\n \"up\": [\n \"alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\",\n \"\",\n- \"alter table \\`book2\\` drop \\`foo\\`;\",\n+ \"alter table \\`book2\\` drop column \\`foo\\`;\",\n \"\",\n \"alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___bar\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___baz\\`;\",\n+ \"alter table \\`test2\\` drop column \\`foo___bar\\`;\",\n+ \"alter table \\`test2\\` drop column \\`foo___baz\\`;\",\n ],\n },\n \"fileName\": \"migration-20191013214813.ts\",\n@@ -487,11 +487,11 @@ export class Migration20191013214813 extends Migration {\n async up(): Promise<void> {\n this.addSql('alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;');\n \n- this.addSql('alter table \\`book2\\` drop \\`foo\\`;');\n+ this.addSql('alter table \\`book2\\` drop column \\`foo\\`;');\n \n this.addSql('alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;');\n- this.addSql('alter table \\`test2\\` drop \\`foo___bar\\`;');\n- this.addSql('alter table \\`test2\\` drop \\`foo___baz\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___bar\\`;');\n+ this.addSql('alter table \\`test2\\` drop column \\`foo___baz\\`;');\n }\n \n async down(): Promise<void> {\n@@ -515,11 +515,11 @@ export class Migration20191013214813 extends Migration {\n \"up\": [\n \"alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\",\n \"\",\n- \"alter table \\`book2\\` drop \\`foo\\`;\",\n+ \"alter table \\`book2\\` drop column \\`foo\\`;\",\n \"\",\n \"alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___bar\\`;\",\n- \"alter table \\`test2\\` drop \\`foo___baz\\`;\",\n+ \"alter table \\`test2\\` drop column \\`foo___bar\\`;\",\n+ \"alter table \\`test2\\` drop column 
\\`foo___baz\\`;\",\n ],\n },\n \"fileName\": \"Migration20191013214813.ts\",\n", "GH4782.test.ts.snap": "@@ -13,7 +13,7 @@ exports[`4782: 0. create schema 1`] = `\n \n exports[`4782: 1. add timestamp(3) column 1`] = `\n {\n- \"down\": \"alter table \\`user\\` drop \\`bar\\`;\n+ \"down\": \"alter table \\`user\\` drop column \\`bar\\`;\n \n \",\n \"up\": \"alter table \\`user\\` add \\`bar\\` timestamp(3) not null default current_timestamp(3);\n@@ -45,7 +45,7 @@ exports[`4782: 4. remove timestamp column 1`] = `\n \"down\": \"alter table \\`user\\` add \\`bar\\` timestamp(6) not null default current_timestamp(6);\n \n \",\n- \"up\": \"alter table \\`user\\` drop \\`bar\\`;\n+ \"up\": \"alter table \\`user\\` drop column \\`bar\\`;\n \n \",\n }\n", "SchemaGenerator.mysql.test.ts.snap": "@@ -188,11 +188,11 @@ set foreign_key_checks = 0;\n \n alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n set foreign_key_checks = 1;\n \"\n@@ -386,11 +386,11 @@ set foreign_key_checks = 0;\n \n alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n set foreign_key_checks = 1;\n \"\n@@ -403,17 +403,18 @@ alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n alter table \\`author2\\` drop index \\`author2_favourite_author_id_index\\`;\n alter table \\`author2\\` drop index \\`author2_name_age_index\\`;\n+\n alter table \\`author2\\` change \\`age\\` \\`age_in_years\\` int null default null;\n alter table \\`author2\\` change \\`favourite_author_id\\` \\`favourite_writer_id\\` int unsigned null;\n alter table \\`author2\\` add constraint \\`author2_favourite_writer_id_foreign\\` foreign key (\\`favourite_writer_id\\`) references \\`author2\\` (\\`id\\`) on update cascade on delete set null;\n alter table \\`author2\\` add index \\`author2_favourite_writer_id_index\\`(\\`favourite_writer_id\\`);\n alter table \\`author2\\` add index \\`author2_name_age_in_years_index\\`(\\`name\\`, \\`age_in_years\\`);\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n \"\n `;\n@@ -456,11 +457,11 @@ exports[`SchemaGenerator update schema [mysql]: mysql-update-schema-create-table\n \n alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop 
\\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n \"\n `;\n@@ -469,7 +470,7 @@ exports[`SchemaGenerator update schema [mysql]: mysql-update-schema-drop-1:1 1`]\n \"alter table \\`foo_bar2\\` drop foreign key \\`foo_bar2_baz_id_foreign\\`;\n \n alter table \\`foo_bar2\\` drop index \\`foo_bar2_baz_id_unique\\`;\n-alter table \\`foo_bar2\\` drop \\`baz_id\\`;\n+alter table \\`foo_bar2\\` drop column \\`baz_id\\`;\n \n \"\n `;\n@@ -480,11 +481,11 @@ exports[`SchemaGenerator update schema [mysql]: mysql-update-schema-drop-column\n alter table \\`author2\\` drop foreign key \\`author2_favourite_book_uuid_pk_foreign\\`;\n \n alter table \\`new_table\\` drop primary key;\n-alter table \\`new_table\\` drop \\`id\\`;\n-alter table \\`new_table\\` drop \\`updated_at\\`;\n+alter table \\`new_table\\` drop column \\`id\\`;\n+alter table \\`new_table\\` drop column \\`updated_at\\`;\n \n alter table \\`author2\\` drop index \\`author2_favourite_book_uuid_pk_index\\`;\n-alter table \\`author2\\` drop \\`favourite_book_uuid_pk\\`;\n+alter table \\`author2\\` drop column \\`favourite_book_uuid_pk\\`;\n \n \"\n `;\n@@ -544,11 +545,11 @@ exports[`SchemaGenerator update schema enums [mysql]: mysql-update-schema-enums-\n \n alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n \"\n `;\n", "SchemaGenerator.mysql2.test.ts.snap": "@@ -160,11 +160,11 @@ drop table if exists \\`base_user2\\`;\n exports[`SchemaGenerator (no FKs) generate schema from metadata [mysql]: mysql-update-schema-dump 1`] = `\n \"alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n \"\n `;\n@@ -176,16 +176,17 @@ alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n alter table \\`author2\\` drop index \\`author2_favourite_author_id_index\\`;\n alter table \\`author2\\` drop index \\`author2_name_age_index\\`;\n+\n alter table \\`author2\\` change \\`age\\` \\`age_in_years\\` int null default null;\n alter table \\`author2\\` change \\`favourite_author_id\\` \\`favourite_writer_id\\` int unsigned null;\n alter table \\`author2\\` add index \\`author2_favourite_writer_id_index\\`(\\`favourite_writer_id\\`);\n alter table \\`author2\\` add index \\`author2_name_age_in_years_index\\`(\\`name\\`, \\`age_in_years\\`);\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n \"\n `;\n@@ -226,11 +227,11 @@ exports[`SchemaGenerator (no FKs) update schema [mysql]: 
mysql-update-schema-cre\n \n alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n \"\n `;\n@@ -239,7 +240,7 @@ exports[`SchemaGenerator (no FKs) update schema [mysql]: mysql-update-schema-dro\n \"alter table \\`foo_bar2\\` drop foreign key \\`foo_bar2_baz_id_foreign\\`;\n \n alter table \\`foo_bar2\\` drop index \\`foo_bar2_baz_id_unique\\`;\n-alter table \\`foo_bar2\\` drop \\`baz_id\\`;\n+alter table \\`foo_bar2\\` drop column \\`baz_id\\`;\n \n \"\n `;\n@@ -250,11 +251,11 @@ exports[`SchemaGenerator (no FKs) update schema [mysql]: mysql-update-schema-dro\n alter table \\`author2\\` drop foreign key \\`author2_favourite_book_uuid_pk_foreign\\`;\n \n alter table \\`new_table\\` drop primary key;\n-alter table \\`new_table\\` drop \\`id\\`;\n-alter table \\`new_table\\` drop \\`updated_at\\`;\n+alter table \\`new_table\\` drop column \\`id\\`;\n+alter table \\`new_table\\` drop column \\`updated_at\\`;\n \n alter table \\`author2\\` drop index \\`author2_favourite_book_uuid_pk_index\\`;\n-alter table \\`author2\\` drop \\`favourite_book_uuid_pk\\`;\n+alter table \\`author2\\` drop column \\`favourite_book_uuid_pk\\`;\n \n \"\n `;\n@@ -314,11 +315,11 @@ exports[`SchemaGenerator (no FKs) update schema enums [mysql]: mysql-update-sche\n \n alter table \\`test2\\` drop foreign key \\`test2_foo___bar_foreign\\`;\n \n-alter table \\`book2\\` drop \\`foo\\`;\n+alter table \\`book2\\` drop column \\`foo\\`;\n \n alter table \\`test2\\` drop index \\`test2_foo___bar_unique\\`;\n-alter table \\`test2\\` drop \\`foo___bar\\`;\n-alter table \\`test2\\` drop \\`foo___baz\\`;\n+alter table \\`test2\\` drop column \\`foo___bar\\`;\n+alter table \\`test2\\` drop column \\`foo___baz\\`;\n \n \"\n `;\n", "SchemaGenerator.postgres.test.ts.snap": "@@ -309,10 +309,11 @@ alter table \"new_table\" drop constraint \"new_table_pkey\";\n alter table \"new_table\" drop column \"id\";\n alter table \"new_table\" drop column \"updated_at\";\n \n+alter table \"author2\" drop column \"favourite_book_uuid_pk\";\n+\n alter table \"author2\" alter column \"name\" drop default;\n alter table \"author2\" alter column \"name\" type int using (\"name\"::int);\n alter table \"author2\" alter column \"name\" set not null;\n-alter table \"author2\" drop column \"favourite_book_uuid_pk\";\n \n \"\n `;\n@@ -341,6 +342,7 @@ exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-sc\n \"alter table \"author2\" drop constraint \"author2_favourite_author_id_foreign\";\n \n drop index \"author2_name_age_index\";\n+\n alter table \"author2\" rename column \"age\" to \"age_in_years\";\n alter table \"author2\" rename column \"favourite_author_id\" to \"favourite_writer_id\";\n alter table \"author2\" add constraint \"author2_favourite_writer_id_foreign\" foreign key (\"favourite_writer_id\") references \"foo_bar2\" (\"id\") on update cascade on delete set null;\n", "changing-pk-type.mysql.test.ts.snap": "@@ -25,12 +25,14 @@ exports[`changing PK column type [mysql] (GH 1480) changing PK type: 1. change P\n exports[`changing PK column type [mysql] (GH 1480) changing PK type: 2. 
add new PK (make it composite PK) 1`] = `\n {\n \"down\": \"alter table \\`user\\` drop primary key;\n-alter table \\`user\\` drop \\`id2\\`;\n+alter table \\`user\\` drop column \\`id2\\`;\n+\n alter table \\`user\\` add primary key \\`user_pkey\\`(\\`id\\`);\n \n \",\n- \"up\": \"alter table \\`user\\` add \\`id2\\` int unsigned not null;\n-alter table \\`user\\` drop primary key;\n+ \"up\": \"alter table \\`user\\` drop primary key;\n+\n+alter table \\`user\\` add \\`id2\\` int unsigned not null;\n alter table \\`user\\` add primary key \\`user_pkey\\`(\\`id\\`, \\`id2\\`);\n \n \",\n@@ -39,13 +41,15 @@ alter table \\`user\\` add primary key \\`user_pkey\\`(\\`id\\`, \\`id2\\`);\n \n exports[`changing PK column type [mysql] (GH 1480) changing PK type: 3. remove old PK (make it single PK again) 1`] = `\n {\n- \"down\": \"alter table \\`user\\` add \\`id2\\` int unsigned not null;\n-alter table \\`user\\` drop primary key;\n+ \"down\": \"alter table \\`user\\` drop primary key;\n+\n+alter table \\`user\\` add \\`id2\\` int unsigned not null;\n alter table \\`user\\` add primary key \\`user_pkey\\`(\\`id\\`, \\`id2\\`);\n \n \",\n \"up\": \"alter table \\`user\\` drop primary key;\n-alter table \\`user\\` drop \\`id2\\`;\n+alter table \\`user\\` drop column \\`id2\\`;\n+\n alter table \\`user\\` add primary key \\`user_pkey\\`(\\`id\\`);\n \n \",\n", "changing-pk-type.postgres.test.ts.snap": "@@ -26,11 +26,13 @@ exports[`changing PK column type [postgres] (GH 1480) changing PK type: 2. add n\n {\n \"down\": \"alter table \"user\" drop constraint \"user_pkey\";\n alter table \"user\" drop column \"id2\";\n+\n alter table \"user\" add constraint \"user_pkey\" primary key (\"id\");\n \n \",\n- \"up\": \"alter table \"user\" add column \"id2\" int not null;\n-alter table \"user\" drop constraint \"user_pkey\";\n+ \"up\": \"alter table \"user\" drop constraint \"user_pkey\";\n+\n+alter table \"user\" add column \"id2\" int not null;\n alter table \"user\" add constraint \"user_pkey\" primary key (\"id\", \"id2\");\n \n \",\n@@ -39,13 +41,15 @@ alter table \"user\" add constraint \"user_pkey\" primary key (\"id\", \"id2\");\n \n exports[`changing PK column type [postgres] (GH 1480) changing PK type: 3. 
remove old PK (make it single PK again) 1`] = `\n {\n- \"down\": \"alter table \"user\" add column \"id2\" int4 not null;\n-alter table \"user\" drop constraint \"user_pkey\";\n+ \"down\": \"alter table \"user\" drop constraint \"user_pkey\";\n+\n+alter table \"user\" add column \"id2\" int4 not null;\n alter table \"user\" add constraint \"user_pkey\" primary key (\"id\", \"id2\");\n \n \",\n \"up\": \"alter table \"user\" drop constraint \"user_pkey\";\n alter table \"user\" drop column \"id2\";\n+\n alter table \"user\" add constraint \"user_pkey\" primary key (\"id\");\n \n \",\n", "check-constraint.mariadb.test.ts.snap": "@@ -8,6 +8,7 @@ exports[`check constraint [mariadb] check constraint diff [mariadb]: mariadb-che\n \n exports[`check constraint [mariadb] check constraint diff [mariadb]: mariadb-check-constraint-diff-2 1`] = `\n \"alter table \\`new_table\\` drop constraint foo;\n+\n alter table \\`new_table\\` add constraint foo check(priceColumn > 0);\n \n \"\n", "check-constraint.mysql.test.ts.snap": "@@ -14,6 +14,7 @@ exports[`check constraint [mysql8] check constraint diff [mysql8]: mysql8-check-\n \n exports[`check constraint [mysql8] check constraint diff [mysql8]: mysql8-check-constraint-diff-2 1`] = `\n \"alter table \\`new_table\\` drop constraint foo;\n+\n alter table \\`new_table\\` add constraint foo check(priceColumn > 0);\n \n \"\n", "check-constraint.postgres.test.ts.snap": "@@ -8,6 +8,7 @@ exports[`check constraint [postgres] check constraint diff [postgres]: postgres-\n \n exports[`check constraint [postgres] check constraint diff [postgres]: postgres-check-constraint-diff-2 1`] = `\n \"alter table \"new_table\" drop constraint foo;\n+\n alter table \"new_table\" add constraint foo check(price > 0);\n \n \"\n", "fk-diffing.postgres.test.ts.snap": "@@ -5,6 +5,7 @@ exports[`dropping tables with FKs in postgres schema generator removes stale FKs\n alter table \"book\" drop constraint \"book_author2_id_foreign\";\n \n alter table \"author\" drop constraint \"author_pkey\";\n+\n alter table \"author\" rename column \"id\" to \"pk\";\n alter table \"author\" add constraint \"author_pkey\" primary key (\"pk\");\n \n@@ -18,11 +19,12 @@ exports[`dropping tables with FKs in postgres schema generator removes stale FKs\n \"alter table \"book\" drop constraint \"book_author1_id_foreign\";\n alter table \"book\" drop constraint \"book_author2_id_foreign\";\n \n+alter table \"book\" drop column \"author1_id\";\n+alter table \"book\" drop column \"author2_id\";\n+\n alter table \"book\" add column \"author1_pk\" int not null, add column \"author2_pk\" int not null;\n alter table \"book\" add constraint \"book_author1_pk_foreign\" foreign key (\"author1_pk\") references \"author\" (\"pk\") on update cascade;\n alter table \"book\" add constraint \"book_author2_pk_foreign\" foreign key (\"author2_pk\") references \"author\" (\"pk\") on update cascade;\n-alter table \"book\" drop column \"author1_id\";\n-alter table \"book\" drop column \"author2_id\";\n \n \"\n `;\n", "index-diffing.mysql.test.ts.snap": "@@ -32,6 +32,7 @@ exports[`indexes on FKs in mysql (GH 1518) schema generator respect indexes on F\n \"alter table \\`book\\` drop index \\`book_meta_data_foo_bar_baz_index\\`;\n alter table \\`book\\` drop index \\`book_meta_data_fooBar_email_unique\\`;\n alter table \\`book\\` drop index \\`custom_index_expr\\`;\n+\n alter table \\`book\\` add index \\`custom_index_expr2\\`(\\`title\\`);\n alter table \\`book\\` add index 
\\`book_meta_data_foo_bar2_meta_data_foo_bar3_index\\`((json_value(\\`meta_data\\`, '$.foo.bar2' returning char(255))), (json_value(\\`meta_data\\`, '$.foo.bar3' returning char(255))));\n alter table \\`book\\` add index \\`lol41\\`(\\`author3_id\\`);\n@@ -49,6 +50,7 @@ exports[`indexes on FKs in mysql (GH 1518) schema generator respect indexes on F\n alter table \\`book\\` drop index \\`book_meta_data_fooBar_bazBaz_meta_data_fooBar_lol123_unique\\`;\n alter table \\`book\\` drop index \\`lol31\\`;\n alter table \\`book\\` drop index \\`lol41\\`;\n+\n alter table \\`book\\` add index \\`lol42\\`(\\`author3_id\\`);\n alter table \\`book\\` add index \\`lol32\\`(\\`author3_id\\`);\n \n", "index-diffing.postgres.test.ts.snap": "@@ -33,6 +33,7 @@ exports[`indexes on FKs in postgres (GH 1518) schema generator respect indexes o\n \"drop index \"book_meta_data_fooBar_email_unique\";\n drop index \"book_meta_data_foo_bar_baz_index\";\n drop index \"custom_index_expr\";\n+\n create index \"custom_index_expr2\" on \"book\" (\"title\");\n create index \"book_meta_data_foo_meta_data_foo_bar3_index\" on \"book\" ((\"meta_data\"->>'foo'), (\"meta_data\"->'foo'->>'bar3'));\n create index \"lol31\" on \"book\" (\"author3_id\");\n@@ -52,6 +53,7 @@ drop index \"custom_index_expr123\";\n drop index \"custom_index_expr2\";\n drop index \"lol31\";\n drop index \"lol41\";\n+\n create index \"lol42\" on \"book\" (\"author3_id\");\n create index \"lol32\" on \"book\" (\"author3_id\");\n \n", "index-diffing.sqlite.test.ts.snap": "@@ -24,6 +24,7 @@ create unique index \\`isbn_unique_constr\\` on \\`book\\` (\\`isbn\\`);\n exports[`indexes on FKs in postgres (GH 1518) schema generator respect indexes on FKs on column update 3`] = `\n \"drop index \\`book_meta_data_fooBar_email_unique\\`;\n drop index \\`book_meta_data_foo_bar_baz_index\\`;\n+\n create index \\`book_meta_data_foo_bar2_meta_data_foo_bar3_index\\` on \\`book\\` ((json_extract(meta_data, '$.foo.bar2')), (json_extract(meta_data, '$.foo.bar3')));\n create index \\`lol41\\` on \\`book\\` (\\`author3_id\\`);\n create index \\`lol31\\` on \\`book\\` (\\`author3_id\\`);\n@@ -41,6 +42,7 @@ exports[`indexes on FKs in postgres (GH 1518) schema generator respect indexes o\n drop index \\`lol31\\`;\n drop index \\`lol41\\`;\n drop index \\`book_meta_data_foo_bar2_meta_data_foo_bar3_index\\`;\n+\n create index \\`lol42\\` on \\`book\\` (\\`author3_id\\`);\n create index \\`lol32\\` on \\`book\\` (\\`author3_id\\`);\n \n", "serial-property.postgres.test.ts": "@@ -138,14 +138,14 @@ test('schema generator works with non-pk autoincrement columns (serial)', async\n \n expect(mock.mock.calls).toHaveLength(10);\n expect(mock.mock.calls[0][0]).toMatch(`column public.something._id of type serial added`);\n- expect(mock.mock.calls[1][0]).toMatch(`'autoincrement' changed for column public.something._id { column1: { name: '_id', type: 'int4', mappedType: IntegerType {}, length: null, precision: 32, scale: 0, nullable: false, default: null, unsigned: true, autoincrement: true, comment: null, primary: false, unique: false, enumItems: [] }, column2: { name: '_id', type: 'int', mappedType: IntegerType {}, unsigned: false, autoincrement: false, primary: false, nullable: false }}`);\n+ expect(mock.mock.calls[1][0]).toMatch(`'autoincrement' changed for column public.something._id { fromColumn: { name: '_id', type: 'int4', mappedType: IntegerType {}, length: null, precision: 32, scale: 0, nullable: false, default: null, unsigned: true, autoincrement: true, comment: null, primary: 
false, unique: false, enumItems: [] }, toColumn: { name: '_id', type: 'int', mappedType: IntegerType {}, unsigned: false, autoincrement: false, primary: false, nullable: false }}`);\n expect(mock.mock.calls[2][0]).toMatch(`column public.something._id changed { changedProperties: Set(1) { 'autoincrement' } }`);\n- expect(mock.mock.calls[3][0]).toMatch(`'autoincrement' changed for column public.something._id { column1: { name: '_id', type: 'int4', mappedType: IntegerType {}, length: null, precision: 32, scale: 0, nullable: false, default: null, comment: null, primary: false, unique: false, enumItems: [] }, column2: { name: '_id', type: 'serial', mappedType: IntegerType {}, unsigned: true, autoincrement: true, primary: false, nullable: false }}`);\n+ expect(mock.mock.calls[3][0]).toMatch(`'autoincrement' changed for column public.something._id { fromColumn: { name: '_id', type: 'int4', mappedType: IntegerType {}, length: null, precision: 32, scale: 0, nullable: false, default: null, unsigned: false, autoincrement: false, comment: null, primary: false, unique: false, enumItems: [] }, toColumn: { name: '_id', type: 'serial', mappedType: IntegerType {}, unsigned: true, autoincrement: true, primary: false, nullable: false }}`);\n expect(mock.mock.calls[4][0]).toMatch(`column public.something._id changed { changedProperties: Set(1) { 'autoincrement' } }`);\n expect(mock.mock.calls[5][0]).toMatch(`column public.something._id removed`);\n expect(mock.mock.calls[6][0]).toMatch(`column public.something._id of type serial added`);\n- expect(mock.mock.calls[7][0]).toMatch(`'type' changed for column public.something.id { columnType1: 'int', columnType2: 'varchar(255)' }`);\n- expect(mock.mock.calls[8][0]).toMatch(`'autoincrement' changed for column public.something.id { column1: { name: 'id', type: 'int4', mappedType: IntegerType {}, length: null, precision: 32, scale: 0, nullable: false, default: null, unsigned: true, autoincrement: true, comment: null, primary: true, unique: false, enumItems: [] }, column2: { name: 'id', type: 'varchar(255)', mappedType: StringType {}, unsigned: false, autoincrement: false, primary: false, nullable: false }}`);\n+ expect(mock.mock.calls[7][0]).toMatch(`'type' changed for column public.something.id { fromColumnType: 'int', toColumnType: 'varchar(255)' }`);\n+ expect(mock.mock.calls[8][0]).toMatch(`'autoincrement' changed for column public.something.id { fromColumn: { name: 'id', type: 'int4', mappedType: IntegerType {}, length: null, precision: 32, scale: 0, nullable: false, default: null, unsigned: true, autoincrement: true, comment: null, primary: true, unique: false, enumItems: [] }, toColumn: { name: 'id', type: 'varchar(255)', mappedType: StringType {}, unsigned: false, autoincrement: false, primary: false, nullable: false }}`);\n expect(mock.mock.calls[9][0]).toMatch(`column public.something.id changed { changedProperties: Set(2) { 'type', 'autoincrement' } }`);\n });\n \n"}
test(duckdb): run tests in parallel using an in-memory database
af9a2c8d56e372f937a4e1216cbc9bd72c963b18
test
https://github.com/ibis-project/ibis/commit/af9a2c8d56e372f937a4e1216cbc9bd72c963b18
run tests in parallel using an in-memory database
{"conftest.py": "@@ -1,6 +1,5 @@\n from __future__ import annotations\n \n-import functools\n from pathlib import Path\n from typing import TYPE_CHECKING, Any\n \n@@ -18,38 +17,32 @@ class TestConf(BackendTest, RoundAwayFromZero):\n def __init__(self, data_directory: Path) -> None:\n self.connection = self.connect(data_directory)\n \n+ script_dir = data_directory.parent\n+\n+ schema = (script_dir / 'schema' / 'duckdb.sql').read_text()\n+\n+ with self.connection.begin() as con:\n+ for stmt in filter(None, map(str.strip, schema.split(';'))):\n+ con.exec_driver_sql(stmt)\n+\n+ for table in TEST_TABLES:\n+ src = data_directory / f'{table}.csv'\n+ con.exec_driver_sql(\n+ f\"COPY {table} FROM {str(src)!r} (DELIMITER ',', HEADER, SAMPLE_SIZE 1)\"\n+ )\n+\n @staticmethod\n- def _load_data(\n- data_dir,\n- script_dir,\n- database: str = \"ibis_testing\",\n- **_: Any,\n- ) -> None:\n+ def _load_data(data_dir, script_dir, **_: Any) -> None:\n \"\"\"Load test data into a DuckDB backend instance.\n \n Parameters\n ----------\n data_dir\n Location of test data\n- script_dir\n- Location of scripts defining schemas\n \"\"\"\n- duckdb = pytest.importorskip(\"duckdb\")\n-\n- schema = (script_dir / 'schema' / 'duckdb.sql').read_text()\n-\n- conn = duckdb.connect(str(data_dir / f\"{database}.ddb\"))\n- for stmt in filter(None, map(str.strip, schema.split(';'))):\n- conn.execute(stmt)\n-\n- for table in TEST_TABLES:\n- src = data_dir / f'{table}.csv'\n- conn.execute(\n- f\"COPY {table} FROM {str(src)!r} (DELIMITER ',', HEADER, SAMPLE_SIZE 1)\"\n- )\n+ return TestConf(data_directory=data_dir)\n \n @staticmethod\n- @functools.lru_cache(maxsize=None)\n def connect(data_directory: Path) -> BaseBackend:\n- path = data_directory / \"ibis_testing.ddb\"\n- return ibis.duckdb.connect(str(path)) # type: ignore\n+ pytest.importorskip(\"duckdb\")\n+ return ibis.duckdb.connect() # type: ignore\n", "test_client.py": "@@ -673,14 +673,9 @@ def test_connect_local_file(out_method, extension, test_employee_data_1, tmp_pat\n \n \n @not_windows\n-def test_invalid_connect():\n+def test_invalid_connect(tmp_path):\n pytest.importorskip(\"duckdb\")\n- url = \"?\".join(\n- [\n- \"duckdb://ci/ibis-testing-data/ibis_testing.ddb\",\n- \"read_only=invalid_value\",\n- ]\n- )\n+ url = f\"duckdb://{tmp_path}?read_only=invalid_value\"\n with pytest.raises(ValueError):\n ibis.connect(url)\n \n"}
chore: quote everything (#8172)
61e16f4ec8e8317e170e174ceacbc509453fff9b
chore
https://github.com/rohankumardubey/ibis/commit/61e16f4ec8e8317e170e174ceacbc509453fff9b
quote everything (#8172)
{"__init__.py": "@@ -111,7 +111,7 @@ class Backend(SQLGlotBackend):\n \n \"\"\"\n url = urlparse(url)\n- database = url.path[1:] or \":memory:\"\n+ database = url.path or \":memory:\"\n return self.connect(database=database, **kwargs)\n \n def raw_sql(self, query: str | sg.Expression, **kwargs: Any) -> Any:\n", "compiler.py": "@@ -163,7 +163,7 @@ class SQLGlotCompiler(abc.ABC):\n no_limit_value: sge.Null | None = None\n \"\"\"The value to use to indicate no limit.\"\"\"\n \n- quoted: bool | None = None\n+ quoted: bool = True\n \"\"\"Whether to always quote identifiers.\"\"\"\n \n NAN: ClassVar[sge.Expression] = sge.Cast(\n", "datatypes.py": "@@ -253,6 +253,8 @@ class SqlglotType(TypeMapper):\n else:\n unit = precision_or_span.this.this\n return dt.Interval(unit=unit, nullable=nullable)\n+ elif isinstance(precision_or_span, sge.Var):\n+ return dt.Interval(unit=precision_or_span.this, nullable=nullable)\n elif precision_or_span is None:\n raise com.IbisTypeError(\"Interval precision is None\")\n else:\n", "out.sql": "@@ -1,12 +1,12 @@\n SELECT\n- t0.job,\n- t0.dept_id,\n- t0.year,\n- t0.y\n-FROM foo AS t0\n+ \"t0\".\"job\",\n+ \"t0\".\"dept_id\",\n+ \"t0\".\"year\",\n+ \"t0\".\"y\"\n+FROM \"foo\" AS \"t0\"\n WHERE\n- t0.job IN (\n+ \"t0\".\"job\" IN (\n SELECT\n- t1.job\n- FROM bar AS t1\n+ \"t1\".\"job\"\n+ FROM \"bar\" AS \"t1\"\n )\n\\ No newline at end of file\n", "out_one_unnest.sql": "@@ -1,16 +1,16 @@\n SELECT\n- t0.rowindex,\n- IF(pos = pos_2, repeated_struct_col, NULL) AS repeated_struct_col\n-FROM array_test AS t0\n-CROSS JOIN UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH(t0.repeated_struct_col)) - 1)) AS pos\n-CROSS JOIN UNNEST(t0.repeated_struct_col) AS repeated_struct_col WITH OFFSET AS pos_2\n+ `t0`.`rowindex`,\n+ IF(pos = pos_2, `repeated_struct_col`, NULL) AS `repeated_struct_col`\n+FROM `array_test` AS `t0`\n+CROSS JOIN UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH(`t0`.`repeated_struct_col`)) - 1)) AS pos\n+CROSS JOIN UNNEST(`t0`.`repeated_struct_col`) AS `repeated_struct_col` WITH OFFSET AS pos_2\n WHERE\n pos = pos_2\n OR (\n pos > (\n- ARRAY_LENGTH(t0.repeated_struct_col) - 1\n+ ARRAY_LENGTH(`t0`.`repeated_struct_col`) - 1\n )\n AND pos_2 = (\n- ARRAY_LENGTH(t0.repeated_struct_col) - 1\n+ ARRAY_LENGTH(`t0`.`repeated_struct_col`) - 1\n )\n )\n\\ No newline at end of file\n", "out_two_unnests.sql": "@@ -1,32 +1,32 @@\n SELECT\n- IF(pos = pos_2, level_two, NULL) AS level_two\n+ IF(pos = pos_2, `level_two`, NULL) AS `level_two`\n FROM (\n SELECT\n- t0.rowindex,\n- IF(pos = pos_2, level_one, NULL).nested_struct_col AS level_one\n- FROM array_test AS t0\n- CROSS JOIN UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH(t0.repeated_struct_col)) - 1)) AS pos\n- CROSS JOIN UNNEST(t0.repeated_struct_col) AS level_one WITH OFFSET AS pos_2\n+ `t0`.`rowindex`,\n+ IF(pos = pos_2, `level_one`, NULL).`nested_struct_col` AS `level_one`\n+ FROM `array_test` AS `t0`\n+ CROSS JOIN UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH(`t0`.`repeated_struct_col`)) - 1)) AS pos\n+ CROSS JOIN UNNEST(`t0`.`repeated_struct_col`) AS `level_one` WITH OFFSET AS pos_2\n WHERE\n pos = pos_2\n OR (\n pos > (\n- ARRAY_LENGTH(t0.repeated_struct_col) - 1\n+ ARRAY_LENGTH(`t0`.`repeated_struct_col`) - 1\n )\n AND pos_2 = (\n- ARRAY_LENGTH(t0.repeated_struct_col) - 1\n+ ARRAY_LENGTH(`t0`.`repeated_struct_col`) - 1\n )\n )\n-) AS t1\n-CROSS JOIN UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH(t1.level_one)) - 1)) AS pos\n-CROSS JOIN UNNEST(t1.level_one) AS level_two WITH OFFSET AS pos_2\n+) AS `t1`\n+CROSS JOIN 
UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH(`t1`.`level_one`)) - 1)) AS pos\n+CROSS JOIN UNNEST(`t1`.`level_one`) AS `level_two` WITH OFFSET AS pos_2\n WHERE\n pos = pos_2\n OR (\n pos > (\n- ARRAY_LENGTH(t1.level_one) - 1\n+ ARRAY_LENGTH(`t1`.`level_one`) - 1\n )\n AND pos_2 = (\n- ARRAY_LENGTH(t1.level_one) - 1\n+ ARRAY_LENGTH(`t1`.`level_one`) - 1\n )\n )\n\\ No newline at end of file\n", "out1.sql": "@@ -1,17 +1,17 @@\n SELECT\n- t1.key,\n+ \"t1\".\"key\",\n SUM((\n (\n- t1.value + 1\n+ \"t1\".\"value\" + 1\n ) + 2\n- ) + 3) AS abc\n+ ) + 3) AS \"abc\"\n FROM (\n SELECT\n- t0.key,\n- t0.value\n- FROM t0 AS t0\n+ \"t0\".\"key\",\n+ \"t0\".\"value\"\n+ FROM \"t0\" AS \"t0\"\n WHERE\n- t0.value = 42\n-) AS t1\n+ \"t0\".\"value\" = 42\n+) AS \"t1\"\n GROUP BY\n- t1.key\n\\ No newline at end of file\n+ \"t1\".\"key\"\n\\ No newline at end of file\n", "out2.sql": "@@ -1,17 +1,17 @@\n SELECT\n- t1.key,\n+ \"t1\".\"key\",\n SUM((\n (\n- t1.value + 1\n+ \"t1\".\"value\" + 1\n ) + 2\n- ) + 3) AS foo\n+ ) + 3) AS \"foo\"\n FROM (\n SELECT\n- t0.key,\n- t0.value\n- FROM t0 AS t0\n+ \"t0\".\"key\",\n+ \"t0\".\"value\"\n+ FROM \"t0\" AS \"t0\"\n WHERE\n- t0.value = 42\n-) AS t1\n+ \"t0\".\"value\" = 42\n+) AS \"t1\"\n GROUP BY\n- t1.key\n\\ No newline at end of file\n+ \"t1\".\"key\"\n\\ No newline at end of file\n", "out3.sql": "@@ -1,3 +1,3 @@\n SELECT\n- LEAST(t0.int_col, 10) AS \"Least()\"\n-FROM functional_alltypes AS t0\n\\ No newline at end of file\n+ LEAST(\"t0\".\"int_col\", 10) AS \"Least()\"\n+FROM \"functional_alltypes\" AS \"t0\"\n\\ No newline at end of file\n", "out4.sql": "@@ -1,3 +1,3 @@\n SELECT\n- LEAST(t0.int_col, t0.bigint_col) AS \"Least()\"\n-FROM functional_alltypes AS t0\n\\ No newline at end of file\n+ LEAST(\"t0\".\"int_col\", \"t0\".\"bigint_col\") AS \"Least()\"\n+FROM \"functional_alltypes\" AS \"t0\"\n\\ No newline at end of file\n", "explicit.sql": "@@ -1,13 +1,13 @@\n SELECT\n- t1.foo_id,\n- t1.total\n+ \"t1\".\"foo_id\",\n+ \"t1\".\"total\"\n FROM (\n SELECT\n- t0.foo_id,\n- SUM(t0.f) AS total\n- FROM star1 AS t0\n+ \"t0\".\"foo_id\",\n+ SUM(\"t0\".\"f\") AS \"total\"\n+ FROM \"star1\" AS \"t0\"\n GROUP BY\n 1\n-) AS t1\n+) AS \"t1\"\n WHERE\n- t1.total > CAST(10 AS TINYINT)\n\\ No newline at end of file\n+ \"t1\".\"total\" > CAST(10 AS TINYINT)\n\\ No newline at end of file\n", "inline.sql": "@@ -1,14 +1,14 @@\n SELECT\n- t1.foo_id,\n- t1.total\n+ \"t1\".\"foo_id\",\n+ \"t1\".\"total\"\n FROM (\n SELECT\n- t0.foo_id,\n- SUM(t0.f) AS total,\n+ \"t0\".\"foo_id\",\n+ SUM(\"t0\".\"f\") AS \"total\",\n COUNT(*) AS \"CountStar()\"\n- FROM star1 AS t0\n+ FROM \"star1\" AS \"t0\"\n GROUP BY\n 1\n-) AS t1\n+) AS \"t1\"\n WHERE\n- t1.\"CountStar()\" > CAST(100 AS TINYINT)\n\\ No newline at end of file\n+ \"t1\".\"CountStar()\" > CAST(100 AS TINYINT)\n\\ No newline at end of file\n", "agg_filtered.sql": "@@ -1,23 +1,23 @@\n SELECT\n- t1.g,\n- SUM(t1.foo) AS \"foo total\"\n+ \"t1\".\"g\",\n+ SUM(\"t1\".\"foo\") AS \"foo total\"\n FROM (\n SELECT\n- t0.a,\n- t0.b,\n- t0.c,\n- t0.d,\n- t0.e,\n- t0.f,\n- t0.g,\n- t0.h,\n- t0.i,\n- t0.j,\n- t0.k,\n- t0.a + t0.b AS foo\n- FROM alltypes AS t0\n+ \"t0\".\"a\",\n+ \"t0\".\"b\",\n+ \"t0\".\"c\",\n+ \"t0\".\"d\",\n+ \"t0\".\"e\",\n+ \"t0\".\"f\",\n+ \"t0\".\"g\",\n+ \"t0\".\"h\",\n+ \"t0\".\"i\",\n+ \"t0\".\"j\",\n+ \"t0\".\"k\",\n+ \"t0\".\"a\" + \"t0\".\"b\" AS \"foo\"\n+ FROM \"alltypes\" AS \"t0\"\n WHERE\n- t0.f > CAST(0 AS TINYINT) AND t0.g = 'bar'\n-) AS t1\n+ \"t0\".\"f\" > CAST(0 AS TINYINT) AND \"t0\".\"g\" = 'bar'\n+) AS 
\"t1\"\n GROUP BY\n 1\n\\ No newline at end of file\n", "agg_filtered2.sql": "@@ -1,25 +1,25 @@\n SELECT\n- t1.g,\n- SUM(t1.foo) AS \"foo total\"\n+ \"t1\".\"g\",\n+ SUM(\"t1\".\"foo\") AS \"foo total\"\n FROM (\n SELECT\n- t0.a,\n- t0.b,\n- t0.c,\n- t0.d,\n- t0.e,\n- t0.f,\n- t0.g,\n- t0.h,\n- t0.i,\n- t0.j,\n- t0.k,\n- t0.a + t0.b AS foo\n- FROM alltypes AS t0\n+ \"t0\".\"a\",\n+ \"t0\".\"b\",\n+ \"t0\".\"c\",\n+ \"t0\".\"d\",\n+ \"t0\".\"e\",\n+ \"t0\".\"f\",\n+ \"t0\".\"g\",\n+ \"t0\".\"h\",\n+ \"t0\".\"i\",\n+ \"t0\".\"j\",\n+ \"t0\".\"k\",\n+ \"t0\".\"a\" + \"t0\".\"b\" AS \"foo\"\n+ FROM \"alltypes\" AS \"t0\"\n WHERE\n- t0.f > CAST(0 AS TINYINT) AND (\n- t0.a + t0.b\n+ \"t0\".\"f\" > CAST(0 AS TINYINT) AND (\n+ \"t0\".\"a\" + \"t0\".\"b\"\n ) < CAST(10 AS TINYINT)\n-) AS t1\n+) AS \"t1\"\n GROUP BY\n 1\n\\ No newline at end of file\n", "filtered.sql": "@@ -1,16 +1,16 @@\n SELECT\n- t0.a,\n- t0.b,\n- t0.c,\n- t0.d,\n- t0.e,\n- t0.f,\n- t0.g,\n- t0.h,\n- t0.i,\n- t0.j,\n- t0.k,\n- t0.a + t0.b AS foo\n-FROM alltypes AS t0\n+ \"t0\".\"a\",\n+ \"t0\".\"b\",\n+ \"t0\".\"c\",\n+ \"t0\".\"d\",\n+ \"t0\".\"e\",\n+ \"t0\".\"f\",\n+ \"t0\".\"g\",\n+ \"t0\".\"h\",\n+ \"t0\".\"i\",\n+ \"t0\".\"j\",\n+ \"t0\".\"k\",\n+ \"t0\".\"a\" + \"t0\".\"b\" AS \"foo\"\n+FROM \"alltypes\" AS \"t0\"\n WHERE\n- t0.f > CAST(0 AS TINYINT) AND t0.g = 'bar'\n\\ No newline at end of file\n+ \"t0\".\"f\" > CAST(0 AS TINYINT) AND \"t0\".\"g\" = 'bar'\n\\ No newline at end of file\n", "proj.sql": "@@ -1,16 +1,16 @@\n SELECT\n- t0.a,\n- t0.b,\n- t0.c,\n- t0.d,\n- t0.e,\n- t0.f,\n- t0.g,\n- t0.h,\n- t0.i,\n- t0.j,\n- t0.k,\n- t0.a + t0.b AS foo\n-FROM alltypes AS t0\n+ \"t0\".\"a\",\n+ \"t0\".\"b\",\n+ \"t0\".\"c\",\n+ \"t0\".\"d\",\n+ \"t0\".\"e\",\n+ \"t0\".\"f\",\n+ \"t0\".\"g\",\n+ \"t0\".\"h\",\n+ \"t0\".\"i\",\n+ \"t0\".\"j\",\n+ \"t0\".\"k\",\n+ \"t0\".\"a\" + \"t0\".\"b\" AS \"foo\"\n+FROM \"alltypes\" AS \"t0\"\n WHERE\n- t0.f > CAST(0 AS TINYINT)\n\\ No newline at end of file\n+ \"t0\".\"f\" > CAST(0 AS TINYINT)\n\\ No newline at end of file\n", "result.sql": "@@ -1,31 +1,31 @@\n SELECT\n- t5.on,\n- t5.by,\n- t5.on_right,\n- t5.by_right,\n- t5.val\n+ \"t5\".\"on\",\n+ \"t5\".\"by\",\n+ \"t5\".\"on_right\",\n+ \"t5\".\"by_right\",\n+ \"t5\".\"val\"\n FROM (\n SELECT\n- t2.on,\n- t2.by,\n- t3.on AS on_right,\n- t3.by AS by_right,\n- t3.val\n- FROM left AS t2\n- LEFT OUTER JOIN right AS t3\n- ON t2.by = t3.by\n-) AS t5\n+ \"t2\".\"on\",\n+ \"t2\".\"by\",\n+ \"t3\".\"on\" AS \"on_right\",\n+ \"t3\".\"by\" AS \"by_right\",\n+ \"t3\".\"val\"\n+ FROM \"left\" AS \"t2\"\n+ LEFT OUTER JOIN \"right\" AS \"t3\"\n+ ON \"t2\".\"by\" = \"t3\".\"by\"\n+) AS \"t5\"\n WHERE\n- t5.on_right = (\n+ \"t5\".\"on_right\" = (\n SELECT\n- MAX(t4.on) AS \"Max(on)\"\n+ MAX(\"t4\".\"on\") AS \"Max(on)\"\n FROM (\n SELECT\n- t1.on,\n- t1.by,\n- t1.val\n- FROM right AS t1\n+ \"t1\".\"on\",\n+ \"t1\".\"by\",\n+ \"t1\".\"val\"\n+ FROM \"right\" AS \"t1\"\n WHERE\n- t1.by = t0.by AND t1.on <= t0.on\n- ) AS t4\n+ \"t1\".\"by\" = \"t0\".\"by\" AND \"t1\".\"on\" <= \"t0\".\"on\"\n+ ) AS \"t4\"\n )\n\\ No newline at end of file\n", "expr3.sql": "@@ -1,23 +1,23 @@\n SELECT\n- t0.c,\n- t0.f,\n- t0.foo_id,\n- t0.bar_id\n-FROM star1 AS t0\n+ \"t0\".\"c\",\n+ \"t0\".\"f\",\n+ \"t0\".\"foo_id\",\n+ \"t0\".\"bar_id\"\n+FROM \"star1\" AS \"t0\"\n WHERE\n- t0.f > LN(\n+ \"t0\".\"f\" > LN(\n (\n SELECT\n- AVG(t1.f) AS \"Mean(f)\"\n+ AVG(\"t1\".\"f\") AS \"Mean(f)\"\n FROM (\n SELECT\n- t0.c,\n- t0.f,\n- t0.foo_id,\n- t0.bar_id\n- FROM star1 AS t0\n+ 
\"t0\".\"c\",\n+ \"t0\".\"f\",\n+ \"t0\".\"foo_id\",\n+ \"t0\".\"bar_id\"\n+ FROM \"star1\" AS \"t0\"\n WHERE\n- t0.foo_id = 'foo'\n- ) AS t1\n+ \"t0\".\"foo_id\" = 'foo'\n+ ) AS \"t1\"\n )\n )\n\\ No newline at end of file\n", "expr4.sql": "@@ -1,25 +1,25 @@\n SELECT\n- t0.c,\n- t0.f,\n- t0.foo_id,\n- t0.bar_id\n-FROM star1 AS t0\n+ \"t0\".\"c\",\n+ \"t0\".\"f\",\n+ \"t0\".\"foo_id\",\n+ \"t0\".\"bar_id\"\n+FROM \"star1\" AS \"t0\"\n WHERE\n- t0.f > (\n+ \"t0\".\"f\" > (\n LN(\n (\n SELECT\n- AVG(t1.f) AS \"Mean(f)\"\n+ AVG(\"t1\".\"f\") AS \"Mean(f)\"\n FROM (\n SELECT\n- t0.c,\n- t0.f,\n- t0.foo_id,\n- t0.bar_id\n- FROM star1 AS t0\n+ \"t0\".\"c\",\n+ \"t0\".\"f\",\n+ \"t0\".\"foo_id\",\n+ \"t0\".\"bar_id\"\n+ FROM \"star1\" AS \"t0\"\n WHERE\n- t0.foo_id = 'foo'\n- ) AS t1\n+ \"t0\".\"foo_id\" = 'foo'\n+ ) AS \"t1\"\n )\n ) + CAST(1 AS TINYINT)\n )\n\\ No newline at end of file\n", "project.sql": "@@ -1,7 +1,7 @@\n SELECT\n- t0.foo,\n- t0.bar,\n- t0.value,\n- t0.foo + t0.bar AS baz,\n- t0.foo * CAST(2 AS TINYINT) AS qux\n-FROM tbl AS t0\n\\ No newline at end of file\n+ \"t0\".\"foo\",\n+ \"t0\".\"bar\",\n+ \"t0\".\"value\",\n+ \"t0\".\"foo\" + \"t0\".\"bar\" AS \"baz\",\n+ \"t0\".\"foo\" * CAST(2 AS TINYINT) AS \"qux\"\n+FROM \"tbl\" AS \"t0\"\n\\ No newline at end of file\n", "project_filter.sql": "@@ -1,9 +1,9 @@\n SELECT\n- t0.foo,\n- t0.bar,\n- t0.value,\n- t0.foo + t0.bar AS baz,\n- t0.foo * CAST(2 AS TINYINT) AS qux\n-FROM tbl AS t0\n+ \"t0\".\"foo\",\n+ \"t0\".\"bar\",\n+ \"t0\".\"value\",\n+ \"t0\".\"foo\" + \"t0\".\"bar\" AS \"baz\",\n+ \"t0\".\"foo\" * CAST(2 AS TINYINT) AS \"qux\"\n+FROM \"tbl\" AS \"t0\"\n WHERE\n- t0.value > CAST(0 AS TINYINT)\n\\ No newline at end of file\n+ \"t0\".\"value\" > CAST(0 AS TINYINT)\n\\ No newline at end of file\n", "inner.sql": "@@ -1,8 +1,8 @@\n SELECT\n- t2.c,\n- t2.f,\n- t2.foo_id,\n- t2.bar_id\n-FROM star1 AS t2\n-INNER JOIN star2 AS t3\n- ON t2.foo_id = t3.foo_id\n\\ No newline at end of file\n+ \"t2\".\"c\",\n+ \"t2\".\"f\",\n+ \"t2\".\"foo_id\",\n+ \"t2\".\"bar_id\"\n+FROM \"star1\" AS \"t2\"\n+INNER JOIN \"star2\" AS \"t3\"\n+ ON \"t2\".\"foo_id\" = \"t3\".\"foo_id\"\n\\ No newline at end of file\n", "inner_two_preds.sql": "@@ -1,8 +1,8 @@\n SELECT\n- t2.c,\n- t2.f,\n- t2.foo_id,\n- t2.bar_id\n-FROM star1 AS t2\n-INNER JOIN star2 AS t3\n- ON t2.foo_id = t3.foo_id AND t2.bar_id = t3.foo_id\n\\ No newline at end of file\n+ \"t2\".\"c\",\n+ \"t2\".\"f\",\n+ \"t2\".\"foo_id\",\n+ \"t2\".\"bar_id\"\n+FROM \"star1\" AS \"t2\"\n+INNER JOIN \"star2\" AS \"t3\"\n+ ON \"t2\".\"foo_id\" = \"t3\".\"foo_id\" AND \"t2\".\"bar_id\" = \"t3\".\"foo_id\"\n\\ No newline at end of file\n", "left.sql": "@@ -1,8 +1,8 @@\n SELECT\n- t2.c,\n- t2.f,\n- t2.foo_id,\n- t2.bar_id\n-FROM star1 AS t2\n-LEFT OUTER JOIN star2 AS t3\n- ON t2.foo_id = t3.foo_id\n\\ No newline at end of file\n+ \"t2\".\"c\",\n+ \"t2\".\"f\",\n+ \"t2\".\"foo_id\",\n+ \"t2\".\"bar_id\"\n+FROM \"star1\" AS \"t2\"\n+LEFT OUTER JOIN \"star2\" AS \"t3\"\n+ ON \"t2\".\"foo_id\" = \"t3\".\"foo_id\"\n\\ No newline at end of file\n", "outer.sql": "@@ -1,8 +1,8 @@\n SELECT\n- t2.c,\n- t2.f,\n- t2.foo_id,\n- t2.bar_id\n-FROM star1 AS t2\n-FULL OUTER JOIN star2 AS t3\n- ON t2.foo_id = t3.foo_id\n\\ No newline at end of file\n+ \"t2\".\"c\",\n+ \"t2\".\"f\",\n+ \"t2\".\"foo_id\",\n+ \"t2\".\"bar_id\"\n+FROM \"star1\" AS \"t2\"\n+FULL OUTER JOIN \"star2\" AS \"t3\"\n+ ON \"t2\".\"foo_id\" = \"t3\".\"foo_id\"\n\\ No newline at end of file\n", "result1.sql": "@@ -1,12 +1,12 @@\n SELECT\n- t1.b,\n- 
COUNT(*) AS b_count\n+ \"t1\".\"b\",\n+ COUNT(*) AS \"b_count\"\n FROM (\n SELECT\n- t0.b\n- FROM t AS t0\n+ \"t0\".\"b\"\n+ FROM \"t\" AS \"t0\"\n ORDER BY\n- t0.a ASC\n-) AS t1\n+ \"t0\".\"a\" ASC\n+) AS \"t1\"\n GROUP BY\n 1\n\\ No newline at end of file\n", "result2.sql": "@@ -1,12 +1,12 @@\n SELECT\n- t1.b,\n- COUNT(*) AS b_count\n+ \"t1\".\"b\",\n+ COUNT(*) AS \"b_count\"\n FROM (\n SELECT\n- t0.b\n- FROM t AS t0\n+ \"t0\".\"b\"\n+ FROM \"t\" AS \"t0\"\n ORDER BY\n- t0.b ASC\n-) AS t1\n+ \"t0\".\"b\" ASC\n+) AS \"t1\"\n GROUP BY\n 1\n\\ No newline at end of file\n", "expr.sql": "@@ -1,12 +1,12 @@\n SELECT\n- t0.c,\n- t0.f,\n- t0.foo_id,\n- t0.bar_id\n-FROM star1 AS t0\n+ \"t0\".\"c\",\n+ \"t0\".\"f\",\n+ \"t0\".\"foo_id\",\n+ \"t0\".\"bar_id\"\n+FROM \"star1\" AS \"t0\"\n WHERE\n- t0.f > (\n+ \"t0\".\"f\" > (\n SELECT\n- AVG(t0.f) AS \"Mean(f)\"\n- FROM star1 AS t0\n+ AVG(\"t0\".\"f\") AS \"Mean(f)\"\n+ FROM \"star1\" AS \"t0\"\n )\n\\ No newline at end of file\n", "expr2.sql": "@@ -1,21 +1,21 @@\n SELECT\n- t0.c,\n- t0.f,\n- t0.foo_id,\n- t0.bar_id\n-FROM star1 AS t0\n+ \"t0\".\"c\",\n+ \"t0\".\"f\",\n+ \"t0\".\"foo_id\",\n+ \"t0\".\"bar_id\"\n+FROM \"star1\" AS \"t0\"\n WHERE\n- t0.f > (\n+ \"t0\".\"f\" > (\n SELECT\n- AVG(t1.f) AS \"Mean(f)\"\n+ AVG(\"t1\".\"f\") AS \"Mean(f)\"\n FROM (\n SELECT\n- t0.c,\n- t0.f,\n- t0.foo_id,\n- t0.bar_id\n- FROM star1 AS t0\n+ \"t0\".\"c\",\n+ \"t0\".\"f\",\n+ \"t0\".\"foo_id\",\n+ \"t0\".\"bar_id\"\n+ FROM \"star1\" AS \"t0\"\n WHERE\n- t0.foo_id = 'foo'\n- ) AS t1\n+ \"t0\".\"foo_id\" = 'foo'\n+ ) AS \"t1\"\n )\n\\ No newline at end of file\n", "e1.sql": "@@ -1,13 +1,13 @@\n SELECT\n- t0.key1,\n- t0.key2,\n- t0.value1\n-FROM foo_t AS t0\n+ \"t0\".\"key1\",\n+ \"t0\".\"key2\",\n+ \"t0\".\"value1\"\n+FROM \"foo_t\" AS \"t0\"\n WHERE\n EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM bar_t AS t1\n+ FROM \"bar_t\" AS \"t1\"\n WHERE\n- t0.key1 = t1.key1\n+ \"t0\".\"key1\" = \"t1\".\"key1\"\n )\n\\ No newline at end of file\n", "e2.sql": "@@ -1,17 +1,17 @@\n SELECT\n- t0.key1,\n- t0.key2,\n- t0.value1\n-FROM foo_t AS t0\n+ \"t0\".\"key1\",\n+ \"t0\".\"key2\",\n+ \"t0\".\"value1\"\n+FROM \"foo_t\" AS \"t0\"\n WHERE\n EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM bar_t AS t1\n+ FROM \"bar_t\" AS \"t1\"\n WHERE\n (\n- t0.key1 = t1.key1\n+ \"t0\".\"key1\" = \"t1\".\"key1\"\n ) AND (\n- t1.key2 = 'foo'\n+ \"t1\".\"key2\" = 'foo'\n )\n )\n\\ No newline at end of file\n", "anti.sql": "@@ -1,25 +1,25 @@\n SELECT\n- t0.id,\n- t0.bool_col,\n- t0.tinyint_col,\n- t0.smallint_col,\n- t0.int_col,\n- t0.bigint_col,\n- t0.float_col,\n- t0.double_col,\n- t0.date_string_col,\n- t0.string_col,\n- t0.timestamp_col,\n- t0.year,\n- t0.month\n-FROM functional_alltypes AS t0\n+ \"t0\".\"id\",\n+ \"t0\".\"bool_col\",\n+ \"t0\".\"tinyint_col\",\n+ \"t0\".\"smallint_col\",\n+ \"t0\".\"int_col\",\n+ \"t0\".\"bigint_col\",\n+ \"t0\".\"float_col\",\n+ \"t0\".\"double_col\",\n+ \"t0\".\"date_string_col\",\n+ \"t0\".\"string_col\",\n+ \"t0\".\"timestamp_col\",\n+ \"t0\".\"year\",\n+ \"t0\".\"month\"\n+FROM \"functional_alltypes\" AS \"t0\"\n WHERE\n NOT (\n EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM functional_alltypes AS t1\n+ FROM \"functional_alltypes\" AS \"t1\"\n WHERE\n- t0.string_col = t1.string_col\n+ \"t0\".\"string_col\" = \"t1\".\"string_col\"\n )\n )\n\\ No newline at end of file\n", "semi.sql": "@@ -1,23 +1,23 @@\n SELECT\n- t0.id,\n- t0.bool_col,\n- t0.tinyint_col,\n- t0.smallint_col,\n- t0.int_col,\n- t0.bigint_col,\n- t0.float_col,\n- 
t0.double_col,\n- t0.date_string_col,\n- t0.string_col,\n- t0.timestamp_col,\n- t0.year,\n- t0.month\n-FROM functional_alltypes AS t0\n+ \"t0\".\"id\",\n+ \"t0\".\"bool_col\",\n+ \"t0\".\"tinyint_col\",\n+ \"t0\".\"smallint_col\",\n+ \"t0\".\"int_col\",\n+ \"t0\".\"bigint_col\",\n+ \"t0\".\"float_col\",\n+ \"t0\".\"double_col\",\n+ \"t0\".\"date_string_col\",\n+ \"t0\".\"string_col\",\n+ \"t0\".\"timestamp_col\",\n+ \"t0\".\"year\",\n+ \"t0\".\"month\"\n+FROM \"functional_alltypes\" AS \"t0\"\n WHERE\n EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM functional_alltypes AS t1\n+ FROM \"functional_alltypes\" AS \"t1\"\n WHERE\n- t0.string_col = t1.string_col\n+ \"t0\".\"string_col\" = \"t1\".\"string_col\"\n )\n\\ No newline at end of file\n", "test_temporal.py": "@@ -2639,11 +2639,6 @@ no_time_type = pytest.mark.xfail(\n - {\"pyspark\", \"impala\", \"clickhouse\", \"oracle\", *_NO_SQLGLOT_DIALECT}\n ),\n *no_sqlglot_dialect,\n- *[\n- param(\"impala\", marks=no_time_type),\n- param(\"clickhouse\", marks=no_time_type),\n- param(\"oracle\", marks=no_time_type),\n- ],\n ],\n )\n @pytest.mark.parametrize(\"micros\", [0, 234567])\n", "h01.sql": "@@ -1,62 +1,62 @@\n SELECT\n- t2.l_returnflag,\n- t2.l_linestatus,\n- t2.sum_qty,\n- t2.sum_base_price,\n- t2.sum_disc_price,\n- t2.sum_charge,\n- t2.avg_qty,\n- t2.avg_price,\n- t2.avg_disc,\n- t2.count_order\n+ \"t2\".\"l_returnflag\",\n+ \"t2\".\"l_linestatus\",\n+ \"t2\".\"sum_qty\",\n+ \"t2\".\"sum_base_price\",\n+ \"t2\".\"sum_disc_price\",\n+ \"t2\".\"sum_charge\",\n+ \"t2\".\"avg_qty\",\n+ \"t2\".\"avg_price\",\n+ \"t2\".\"avg_disc\",\n+ \"t2\".\"count_order\"\n FROM (\n SELECT\n- t1.l_returnflag,\n- t1.l_linestatus,\n- SUM(t1.l_quantity) AS sum_qty,\n- SUM(t1.l_extendedprice) AS sum_base_price,\n- SUM(t1.l_extendedprice * (\n- CAST(1 AS TINYINT) - t1.l_discount\n- )) AS sum_disc_price,\n+ \"t1\".\"l_returnflag\",\n+ \"t1\".\"l_linestatus\",\n+ SUM(\"t1\".\"l_quantity\") AS \"sum_qty\",\n+ SUM(\"t1\".\"l_extendedprice\") AS \"sum_base_price\",\n+ SUM(\"t1\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t1\".\"l_discount\"\n+ )) AS \"sum_disc_price\",\n SUM(\n (\n- t1.l_extendedprice * (\n- CAST(1 AS TINYINT) - t1.l_discount\n+ \"t1\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t1\".\"l_discount\"\n )\n ) * (\n- t1.l_tax + CAST(1 AS TINYINT)\n+ \"t1\".\"l_tax\" + CAST(1 AS TINYINT)\n )\n- ) AS sum_charge,\n- AVG(t1.l_quantity) AS avg_qty,\n- AVG(t1.l_extendedprice) AS avg_price,\n- AVG(t1.l_discount) AS avg_disc,\n- COUNT(*) AS count_order\n+ ) AS \"sum_charge\",\n+ AVG(\"t1\".\"l_quantity\") AS \"avg_qty\",\n+ AVG(\"t1\".\"l_extendedprice\") AS \"avg_price\",\n+ AVG(\"t1\".\"l_discount\") AS \"avg_disc\",\n+ COUNT(*) AS \"count_order\"\n FROM (\n SELECT\n- t0.l_orderkey,\n- t0.l_partkey,\n- t0.l_suppkey,\n- t0.l_linenumber,\n- t0.l_quantity,\n- t0.l_extendedprice,\n- t0.l_discount,\n- t0.l_tax,\n- t0.l_returnflag,\n- t0.l_linestatus,\n- t0.l_shipdate,\n- t0.l_commitdate,\n- t0.l_receiptdate,\n- t0.l_shipinstruct,\n- t0.l_shipmode,\n- t0.l_comment\n- FROM lineitem AS t0\n+ \"t0\".\"l_orderkey\",\n+ \"t0\".\"l_partkey\",\n+ \"t0\".\"l_suppkey\",\n+ \"t0\".\"l_linenumber\",\n+ \"t0\".\"l_quantity\",\n+ \"t0\".\"l_extendedprice\",\n+ \"t0\".\"l_discount\",\n+ \"t0\".\"l_tax\",\n+ \"t0\".\"l_returnflag\",\n+ \"t0\".\"l_linestatus\",\n+ \"t0\".\"l_shipdate\",\n+ \"t0\".\"l_commitdate\",\n+ \"t0\".\"l_receiptdate\",\n+ \"t0\".\"l_shipinstruct\",\n+ \"t0\".\"l_shipmode\",\n+ \"t0\".\"l_comment\"\n+ FROM \"lineitem\" AS \"t0\"\n WHERE\n- 
t0.l_shipdate <= MAKE_DATE(1998, 9, 2)\n- ) AS t1\n+ \"t0\".\"l_shipdate\" <= MAKE_DATE(1998, 9, 2)\n+ ) AS \"t1\"\n GROUP BY\n 1,\n 2\n-) AS t2\n+) AS \"t2\"\n ORDER BY\n- t2.l_returnflag ASC,\n- t2.l_linestatus ASC\n\\ No newline at end of file\n+ \"t2\".\"l_returnflag\" ASC,\n+ \"t2\".\"l_linestatus\" ASC\n\\ No newline at end of file\n", "h02.sql": "@@ -1,116 +1,116 @@\n SELECT\n- t14.s_acctbal,\n- t14.s_name,\n- t14.n_name,\n- t14.p_partkey,\n- t14.p_mfgr,\n- t14.s_address,\n- t14.s_phone,\n- t14.s_comment\n+ \"t14\".\"s_acctbal\",\n+ \"t14\".\"s_name\",\n+ \"t14\".\"n_name\",\n+ \"t14\".\"p_partkey\",\n+ \"t14\".\"p_mfgr\",\n+ \"t14\".\"s_address\",\n+ \"t14\".\"s_phone\",\n+ \"t14\".\"s_comment\"\n FROM (\n SELECT\n- t5.p_partkey,\n- t5.p_name,\n- t5.p_mfgr,\n- t5.p_brand,\n- t5.p_type,\n- t5.p_size,\n- t5.p_container,\n- t5.p_retailprice,\n- t5.p_comment,\n- t6.ps_partkey,\n- t6.ps_suppkey,\n- t6.ps_availqty,\n- t6.ps_supplycost,\n- t6.ps_comment,\n- t8.s_suppkey,\n- t8.s_name,\n- t8.s_address,\n- t8.s_nationkey,\n- t8.s_phone,\n- t8.s_acctbal,\n- t8.s_comment,\n- t10.n_nationkey,\n- t10.n_name,\n- t10.n_regionkey,\n- t10.n_comment,\n- t12.r_regionkey,\n- t12.r_name,\n- t12.r_comment\n- FROM part AS t5\n- INNER JOIN partsupp AS t6\n- ON t5.p_partkey = t6.ps_partkey\n- INNER JOIN supplier AS t8\n- ON t8.s_suppkey = t6.ps_suppkey\n- INNER JOIN nation AS t10\n- ON t8.s_nationkey = t10.n_nationkey\n- INNER JOIN region AS t12\n- ON t10.n_regionkey = t12.r_regionkey\n-) AS t14\n+ \"t5\".\"p_partkey\",\n+ \"t5\".\"p_name\",\n+ \"t5\".\"p_mfgr\",\n+ \"t5\".\"p_brand\",\n+ \"t5\".\"p_type\",\n+ \"t5\".\"p_size\",\n+ \"t5\".\"p_container\",\n+ \"t5\".\"p_retailprice\",\n+ \"t5\".\"p_comment\",\n+ \"t6\".\"ps_partkey\",\n+ \"t6\".\"ps_suppkey\",\n+ \"t6\".\"ps_availqty\",\n+ \"t6\".\"ps_supplycost\",\n+ \"t6\".\"ps_comment\",\n+ \"t8\".\"s_suppkey\",\n+ \"t8\".\"s_name\",\n+ \"t8\".\"s_address\",\n+ \"t8\".\"s_nationkey\",\n+ \"t8\".\"s_phone\",\n+ \"t8\".\"s_acctbal\",\n+ \"t8\".\"s_comment\",\n+ \"t10\".\"n_nationkey\",\n+ \"t10\".\"n_name\",\n+ \"t10\".\"n_regionkey\",\n+ \"t10\".\"n_comment\",\n+ \"t12\".\"r_regionkey\",\n+ \"t12\".\"r_name\",\n+ \"t12\".\"r_comment\"\n+ FROM \"part\" AS \"t5\"\n+ INNER JOIN \"partsupp\" AS \"t6\"\n+ ON \"t5\".\"p_partkey\" = \"t6\".\"ps_partkey\"\n+ INNER JOIN \"supplier\" AS \"t8\"\n+ ON \"t8\".\"s_suppkey\" = \"t6\".\"ps_suppkey\"\n+ INNER JOIN \"nation\" AS \"t10\"\n+ ON \"t8\".\"s_nationkey\" = \"t10\".\"n_nationkey\"\n+ INNER JOIN \"region\" AS \"t12\"\n+ ON \"t10\".\"n_regionkey\" = \"t12\".\"r_regionkey\"\n+) AS \"t14\"\n WHERE\n- t14.p_size = CAST(15 AS TINYINT)\n- AND t14.p_type LIKE '%BRASS'\n- AND t14.r_name = 'EUROPE'\n- AND t14.ps_supplycost = (\n+ \"t14\".\"p_size\" = CAST(15 AS TINYINT)\n+ AND \"t14\".\"p_type\" LIKE '%BRASS'\n+ AND \"t14\".\"r_name\" = 'EUROPE'\n+ AND \"t14\".\"ps_supplycost\" = (\n SELECT\n- MIN(t16.ps_supplycost) AS \"Min(ps_supplycost)\"\n+ MIN(\"t16\".\"ps_supplycost\") AS \"Min(ps_supplycost)\"\n FROM (\n SELECT\n- t15.ps_partkey,\n- t15.ps_suppkey,\n- t15.ps_availqty,\n- t15.ps_supplycost,\n- t15.ps_comment,\n- t15.s_suppkey,\n- t15.s_name,\n- t15.s_address,\n- t15.s_nationkey,\n- t15.s_phone,\n- t15.s_acctbal,\n- t15.s_comment,\n- t15.n_nationkey,\n- t15.n_name,\n- t15.n_regionkey,\n- t15.n_comment,\n- t15.r_regionkey,\n- t15.r_name,\n- t15.r_comment\n+ \"t15\".\"ps_partkey\",\n+ \"t15\".\"ps_suppkey\",\n+ \"t15\".\"ps_availqty\",\n+ \"t15\".\"ps_supplycost\",\n+ \"t15\".\"ps_comment\",\n+ \"t15\".\"s_suppkey\",\n+ 
\"t15\".\"s_name\",\n+ \"t15\".\"s_address\",\n+ \"t15\".\"s_nationkey\",\n+ \"t15\".\"s_phone\",\n+ \"t15\".\"s_acctbal\",\n+ \"t15\".\"s_comment\",\n+ \"t15\".\"n_nationkey\",\n+ \"t15\".\"n_name\",\n+ \"t15\".\"n_regionkey\",\n+ \"t15\".\"n_comment\",\n+ \"t15\".\"r_regionkey\",\n+ \"t15\".\"r_name\",\n+ \"t15\".\"r_comment\"\n FROM (\n SELECT\n- t7.ps_partkey,\n- t7.ps_suppkey,\n- t7.ps_availqty,\n- t7.ps_supplycost,\n- t7.ps_comment,\n- t9.s_suppkey,\n- t9.s_name,\n- t9.s_address,\n- t9.s_nationkey,\n- t9.s_phone,\n- t9.s_acctbal,\n- t9.s_comment,\n- t11.n_nationkey,\n- t11.n_name,\n- t11.n_regionkey,\n- t11.n_comment,\n- t13.r_regionkey,\n- t13.r_name,\n- t13.r_comment\n- FROM partsupp AS t7\n- INNER JOIN supplier AS t9\n- ON t9.s_suppkey = t7.ps_suppkey\n- INNER JOIN nation AS t11\n- ON t9.s_nationkey = t11.n_nationkey\n- INNER JOIN region AS t13\n- ON t11.n_regionkey = t13.r_regionkey\n- ) AS t15\n+ \"t7\".\"ps_partkey\",\n+ \"t7\".\"ps_suppkey\",\n+ \"t7\".\"ps_availqty\",\n+ \"t7\".\"ps_supplycost\",\n+ \"t7\".\"ps_comment\",\n+ \"t9\".\"s_suppkey\",\n+ \"t9\".\"s_name\",\n+ \"t9\".\"s_address\",\n+ \"t9\".\"s_nationkey\",\n+ \"t9\".\"s_phone\",\n+ \"t9\".\"s_acctbal\",\n+ \"t9\".\"s_comment\",\n+ \"t11\".\"n_nationkey\",\n+ \"t11\".\"n_name\",\n+ \"t11\".\"n_regionkey\",\n+ \"t11\".\"n_comment\",\n+ \"t13\".\"r_regionkey\",\n+ \"t13\".\"r_name\",\n+ \"t13\".\"r_comment\"\n+ FROM \"partsupp\" AS \"t7\"\n+ INNER JOIN \"supplier\" AS \"t9\"\n+ ON \"t9\".\"s_suppkey\" = \"t7\".\"ps_suppkey\"\n+ INNER JOIN \"nation\" AS \"t11\"\n+ ON \"t9\".\"s_nationkey\" = \"t11\".\"n_nationkey\"\n+ INNER JOIN \"region\" AS \"t13\"\n+ ON \"t11\".\"n_regionkey\" = \"t13\".\"r_regionkey\"\n+ ) AS \"t15\"\n WHERE\n- t15.r_name = 'EUROPE' AND t14.p_partkey = t15.ps_partkey\n- ) AS t16\n+ \"t15\".\"r_name\" = 'EUROPE' AND \"t14\".\"p_partkey\" = \"t15\".\"ps_partkey\"\n+ ) AS \"t16\"\n )\n ORDER BY\n- t14.s_acctbal DESC,\n- t14.n_name ASC,\n- t14.s_name ASC,\n- t14.p_partkey ASC\n+ \"t14\".\"s_acctbal\" DESC,\n+ \"t14\".\"n_name\" ASC,\n+ \"t14\".\"s_name\" ASC,\n+ \"t14\".\"p_partkey\" ASC\n LIMIT 100\n\\ No newline at end of file\n", "h03.sql": "@@ -1,103 +1,103 @@\n SELECT\n- t8.l_orderkey,\n- t8.revenue,\n- t8.o_orderdate,\n- t8.o_shippriority\n+ \"t8\".\"l_orderkey\",\n+ \"t8\".\"revenue\",\n+ \"t8\".\"o_orderdate\",\n+ \"t8\".\"o_shippriority\"\n FROM (\n SELECT\n- t7.l_orderkey,\n- t7.o_orderdate,\n- t7.o_shippriority,\n- SUM(t7.l_extendedprice * (\n- CAST(1 AS TINYINT) - t7.l_discount\n- )) AS revenue\n+ \"t7\".\"l_orderkey\",\n+ \"t7\".\"o_orderdate\",\n+ \"t7\".\"o_shippriority\",\n+ SUM(\"t7\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t7\".\"l_discount\"\n+ )) AS \"revenue\"\n FROM (\n SELECT\n- t6.c_custkey,\n- t6.c_name,\n- t6.c_address,\n- t6.c_nationkey,\n- t6.c_phone,\n- t6.c_acctbal,\n- t6.c_mktsegment,\n- t6.c_comment,\n- t6.o_orderkey,\n- t6.o_custkey,\n- t6.o_orderstatus,\n- t6.o_totalprice,\n- t6.o_orderdate,\n- t6.o_orderpriority,\n- t6.o_clerk,\n- t6.o_shippriority,\n- t6.o_comment,\n- t6.l_orderkey,\n- t6.l_partkey,\n- t6.l_suppkey,\n- t6.l_linenumber,\n- t6.l_quantity,\n- t6.l_extendedprice,\n- t6.l_discount,\n- t6.l_tax,\n- t6.l_returnflag,\n- t6.l_linestatus,\n- t6.l_shipdate,\n- t6.l_commitdate,\n- t6.l_receiptdate,\n- t6.l_shipinstruct,\n- t6.l_shipmode,\n- t6.l_comment\n+ \"t6\".\"c_custkey\",\n+ \"t6\".\"c_name\",\n+ \"t6\".\"c_address\",\n+ \"t6\".\"c_nationkey\",\n+ \"t6\".\"c_phone\",\n+ \"t6\".\"c_acctbal\",\n+ \"t6\".\"c_mktsegment\",\n+ 
\"t6\".\"c_comment\",\n+ \"t6\".\"o_orderkey\",\n+ \"t6\".\"o_custkey\",\n+ \"t6\".\"o_orderstatus\",\n+ \"t6\".\"o_totalprice\",\n+ \"t6\".\"o_orderdate\",\n+ \"t6\".\"o_orderpriority\",\n+ \"t6\".\"o_clerk\",\n+ \"t6\".\"o_shippriority\",\n+ \"t6\".\"o_comment\",\n+ \"t6\".\"l_orderkey\",\n+ \"t6\".\"l_partkey\",\n+ \"t6\".\"l_suppkey\",\n+ \"t6\".\"l_linenumber\",\n+ \"t6\".\"l_quantity\",\n+ \"t6\".\"l_extendedprice\",\n+ \"t6\".\"l_discount\",\n+ \"t6\".\"l_tax\",\n+ \"t6\".\"l_returnflag\",\n+ \"t6\".\"l_linestatus\",\n+ \"t6\".\"l_shipdate\",\n+ \"t6\".\"l_commitdate\",\n+ \"t6\".\"l_receiptdate\",\n+ \"t6\".\"l_shipinstruct\",\n+ \"t6\".\"l_shipmode\",\n+ \"t6\".\"l_comment\"\n FROM (\n SELECT\n- t3.c_custkey,\n- t3.c_name,\n- t3.c_address,\n- t3.c_nationkey,\n- t3.c_phone,\n- t3.c_acctbal,\n- t3.c_mktsegment,\n- t3.c_comment,\n- t4.o_orderkey,\n- t4.o_custkey,\n- t4.o_orderstatus,\n- t4.o_totalprice,\n- t4.o_orderdate,\n- t4.o_orderpriority,\n- t4.o_clerk,\n- t4.o_shippriority,\n- t4.o_comment,\n- t5.l_orderkey,\n- t5.l_partkey,\n- t5.l_suppkey,\n- t5.l_linenumber,\n- t5.l_quantity,\n- t5.l_extendedprice,\n- t5.l_discount,\n- t5.l_tax,\n- t5.l_returnflag,\n- t5.l_linestatus,\n- t5.l_shipdate,\n- t5.l_commitdate,\n- t5.l_receiptdate,\n- t5.l_shipinstruct,\n- t5.l_shipmode,\n- t5.l_comment\n- FROM customer AS t3\n- INNER JOIN orders AS t4\n- ON t3.c_custkey = t4.o_custkey\n- INNER JOIN lineitem AS t5\n- ON t5.l_orderkey = t4.o_orderkey\n- ) AS t6\n+ \"t3\".\"c_custkey\",\n+ \"t3\".\"c_name\",\n+ \"t3\".\"c_address\",\n+ \"t3\".\"c_nationkey\",\n+ \"t3\".\"c_phone\",\n+ \"t3\".\"c_acctbal\",\n+ \"t3\".\"c_mktsegment\",\n+ \"t3\".\"c_comment\",\n+ \"t4\".\"o_orderkey\",\n+ \"t4\".\"o_custkey\",\n+ \"t4\".\"o_orderstatus\",\n+ \"t4\".\"o_totalprice\",\n+ \"t4\".\"o_orderdate\",\n+ \"t4\".\"o_orderpriority\",\n+ \"t4\".\"o_clerk\",\n+ \"t4\".\"o_shippriority\",\n+ \"t4\".\"o_comment\",\n+ \"t5\".\"l_orderkey\",\n+ \"t5\".\"l_partkey\",\n+ \"t5\".\"l_suppkey\",\n+ \"t5\".\"l_linenumber\",\n+ \"t5\".\"l_quantity\",\n+ \"t5\".\"l_extendedprice\",\n+ \"t5\".\"l_discount\",\n+ \"t5\".\"l_tax\",\n+ \"t5\".\"l_returnflag\",\n+ \"t5\".\"l_linestatus\",\n+ \"t5\".\"l_shipdate\",\n+ \"t5\".\"l_commitdate\",\n+ \"t5\".\"l_receiptdate\",\n+ \"t5\".\"l_shipinstruct\",\n+ \"t5\".\"l_shipmode\",\n+ \"t5\".\"l_comment\"\n+ FROM \"customer\" AS \"t3\"\n+ INNER JOIN \"orders\" AS \"t4\"\n+ ON \"t3\".\"c_custkey\" = \"t4\".\"o_custkey\"\n+ INNER JOIN \"lineitem\" AS \"t5\"\n+ ON \"t5\".\"l_orderkey\" = \"t4\".\"o_orderkey\"\n+ ) AS \"t6\"\n WHERE\n- t6.c_mktsegment = 'BUILDING'\n- AND t6.o_orderdate < MAKE_DATE(1995, 3, 15)\n- AND t6.l_shipdate > MAKE_DATE(1995, 3, 15)\n- ) AS t7\n+ \"t6\".\"c_mktsegment\" = 'BUILDING'\n+ AND \"t6\".\"o_orderdate\" < MAKE_DATE(1995, 3, 15)\n+ AND \"t6\".\"l_shipdate\" > MAKE_DATE(1995, 3, 15)\n+ ) AS \"t7\"\n GROUP BY\n 1,\n 2,\n 3\n-) AS t8\n+) AS \"t8\"\n ORDER BY\n- t8.revenue DESC,\n- t8.o_orderdate ASC\n+ \"t8\".\"revenue\" DESC,\n+ \"t8\".\"o_orderdate\" ASC\n LIMIT 10\n\\ No newline at end of file\n", "h04.sql": "@@ -1,39 +1,40 @@\n SELECT\n- t4.o_orderpriority,\n- t4.order_count\n+ \"t4\".\"o_orderpriority\",\n+ \"t4\".\"order_count\"\n FROM (\n SELECT\n- t3.o_orderpriority,\n- COUNT(*) AS order_count\n+ \"t3\".\"o_orderpriority\",\n+ COUNT(*) AS \"order_count\"\n FROM (\n SELECT\n- t0.o_orderkey,\n- t0.o_custkey,\n- t0.o_orderstatus,\n- t0.o_totalprice,\n- t0.o_orderdate,\n- t0.o_orderpriority,\n- t0.o_clerk,\n- t0.o_shippriority,\n- t0.o_comment\n- FROM orders 
AS t0\n+ \"t0\".\"o_orderkey\",\n+ \"t0\".\"o_custkey\",\n+ \"t0\".\"o_orderstatus\",\n+ \"t0\".\"o_totalprice\",\n+ \"t0\".\"o_orderdate\",\n+ \"t0\".\"o_orderpriority\",\n+ \"t0\".\"o_clerk\",\n+ \"t0\".\"o_shippriority\",\n+ \"t0\".\"o_comment\"\n+ FROM \"orders\" AS \"t0\"\n WHERE\n EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM lineitem AS t1\n+ FROM \"lineitem\" AS \"t1\"\n WHERE\n (\n- t1.l_orderkey = t0.o_orderkey\n- ) AND (\n- t1.l_commitdate < t1.l_receiptdate\n+ \"t1\".\"l_orderkey\" = \"t0\".\"o_orderkey\"\n+ )\n+ AND (\n+ \"t1\".\"l_commitdate\" < \"t1\".\"l_receiptdate\"\n )\n )\n- AND t0.o_orderdate >= MAKE_DATE(1993, 7, 1)\n- AND t0.o_orderdate < MAKE_DATE(1993, 10, 1)\n- ) AS t3\n+ AND \"t0\".\"o_orderdate\" >= MAKE_DATE(1993, 7, 1)\n+ AND \"t0\".\"o_orderdate\" < MAKE_DATE(1993, 10, 1)\n+ ) AS \"t3\"\n GROUP BY\n 1\n-) AS t4\n+) AS \"t4\"\n ORDER BY\n- t4.o_orderpriority ASC\n\\ No newline at end of file\n+ \"t4\".\"o_orderpriority\" ASC\n\\ No newline at end of file\n", "h05.sql": "@@ -1,129 +1,129 @@\n SELECT\n- t14.n_name,\n- t14.revenue\n+ \"t14\".\"n_name\",\n+ \"t14\".\"revenue\"\n FROM (\n SELECT\n- t13.n_name,\n- SUM(t13.l_extendedprice * (\n- CAST(1 AS TINYINT) - t13.l_discount\n- )) AS revenue\n+ \"t13\".\"n_name\",\n+ SUM(\"t13\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t13\".\"l_discount\"\n+ )) AS \"revenue\"\n FROM (\n SELECT\n- t12.c_custkey,\n- t12.c_name,\n- t12.c_address,\n- t12.c_nationkey,\n- t12.c_phone,\n- t12.c_acctbal,\n- t12.c_mktsegment,\n- t12.c_comment,\n- t12.o_orderkey,\n- t12.o_custkey,\n- t12.o_orderstatus,\n- t12.o_totalprice,\n- t12.o_orderdate,\n- t12.o_orderpriority,\n- t12.o_clerk,\n- t12.o_shippriority,\n- t12.o_comment,\n- t12.l_orderkey,\n- t12.l_partkey,\n- t12.l_suppkey,\n- t12.l_linenumber,\n- t12.l_quantity,\n- t12.l_extendedprice,\n- t12.l_discount,\n- t12.l_tax,\n- t12.l_returnflag,\n- t12.l_linestatus,\n- t12.l_shipdate,\n- t12.l_commitdate,\n- t12.l_receiptdate,\n- t12.l_shipinstruct,\n- t12.l_shipmode,\n- t12.l_comment,\n- t12.s_suppkey,\n- t12.s_name,\n- t12.s_address,\n- t12.s_nationkey,\n- t12.s_phone,\n- t12.s_acctbal,\n- t12.s_comment,\n- t12.n_nationkey,\n- t12.n_name,\n- t12.n_regionkey,\n- t12.n_comment,\n- t12.r_regionkey,\n- t12.r_name,\n- t12.r_comment\n+ \"t12\".\"c_custkey\",\n+ \"t12\".\"c_name\",\n+ \"t12\".\"c_address\",\n+ \"t12\".\"c_nationkey\",\n+ \"t12\".\"c_phone\",\n+ \"t12\".\"c_acctbal\",\n+ \"t12\".\"c_mktsegment\",\n+ \"t12\".\"c_comment\",\n+ \"t12\".\"o_orderkey\",\n+ \"t12\".\"o_custkey\",\n+ \"t12\".\"o_orderstatus\",\n+ \"t12\".\"o_totalprice\",\n+ \"t12\".\"o_orderdate\",\n+ \"t12\".\"o_orderpriority\",\n+ \"t12\".\"o_clerk\",\n+ \"t12\".\"o_shippriority\",\n+ \"t12\".\"o_comment\",\n+ \"t12\".\"l_orderkey\",\n+ \"t12\".\"l_partkey\",\n+ \"t12\".\"l_suppkey\",\n+ \"t12\".\"l_linenumber\",\n+ \"t12\".\"l_quantity\",\n+ \"t12\".\"l_extendedprice\",\n+ \"t12\".\"l_discount\",\n+ \"t12\".\"l_tax\",\n+ \"t12\".\"l_returnflag\",\n+ \"t12\".\"l_linestatus\",\n+ \"t12\".\"l_shipdate\",\n+ \"t12\".\"l_commitdate\",\n+ \"t12\".\"l_receiptdate\",\n+ \"t12\".\"l_shipinstruct\",\n+ \"t12\".\"l_shipmode\",\n+ \"t12\".\"l_comment\",\n+ \"t12\".\"s_suppkey\",\n+ \"t12\".\"s_name\",\n+ \"t12\".\"s_address\",\n+ \"t12\".\"s_nationkey\",\n+ \"t12\".\"s_phone\",\n+ \"t12\".\"s_acctbal\",\n+ \"t12\".\"s_comment\",\n+ \"t12\".\"n_nationkey\",\n+ \"t12\".\"n_name\",\n+ \"t12\".\"n_regionkey\",\n+ \"t12\".\"n_comment\",\n+ \"t12\".\"r_regionkey\",\n+ \"t12\".\"r_name\",\n+ \"t12\".\"r_comment\"\n 
FROM (\n SELECT\n- t6.c_custkey,\n- t6.c_name,\n- t6.c_address,\n- t6.c_nationkey,\n- t6.c_phone,\n- t6.c_acctbal,\n- t6.c_mktsegment,\n- t6.c_comment,\n- t7.o_orderkey,\n- t7.o_custkey,\n- t7.o_orderstatus,\n- t7.o_totalprice,\n- t7.o_orderdate,\n- t7.o_orderpriority,\n- t7.o_clerk,\n- t7.o_shippriority,\n- t7.o_comment,\n- t8.l_orderkey,\n- t8.l_partkey,\n- t8.l_suppkey,\n- t8.l_linenumber,\n- t8.l_quantity,\n- t8.l_extendedprice,\n- t8.l_discount,\n- t8.l_tax,\n- t8.l_returnflag,\n- t8.l_linestatus,\n- t8.l_shipdate,\n- t8.l_commitdate,\n- t8.l_receiptdate,\n- t8.l_shipinstruct,\n- t8.l_shipmode,\n- t8.l_comment,\n- t9.s_suppkey,\n- t9.s_name,\n- t9.s_address,\n- t9.s_nationkey,\n- t9.s_phone,\n- t9.s_acctbal,\n- t9.s_comment,\n- t10.n_nationkey,\n- t10.n_name,\n- t10.n_regionkey,\n- t10.n_comment,\n- t11.r_regionkey,\n- t11.r_name,\n- t11.r_comment\n- FROM customer AS t6\n- INNER JOIN orders AS t7\n- ON t6.c_custkey = t7.o_custkey\n- INNER JOIN lineitem AS t8\n- ON t8.l_orderkey = t7.o_orderkey\n- INNER JOIN supplier AS t9\n- ON t8.l_suppkey = t9.s_suppkey\n- INNER JOIN nation AS t10\n- ON t6.c_nationkey = t9.s_nationkey AND t9.s_nationkey = t10.n_nationkey\n- INNER JOIN region AS t11\n- ON t10.n_regionkey = t11.r_regionkey\n- ) AS t12\n+ \"t6\".\"c_custkey\",\n+ \"t6\".\"c_name\",\n+ \"t6\".\"c_address\",\n+ \"t6\".\"c_nationkey\",\n+ \"t6\".\"c_phone\",\n+ \"t6\".\"c_acctbal\",\n+ \"t6\".\"c_mktsegment\",\n+ \"t6\".\"c_comment\",\n+ \"t7\".\"o_orderkey\",\n+ \"t7\".\"o_custkey\",\n+ \"t7\".\"o_orderstatus\",\n+ \"t7\".\"o_totalprice\",\n+ \"t7\".\"o_orderdate\",\n+ \"t7\".\"o_orderpriority\",\n+ \"t7\".\"o_clerk\",\n+ \"t7\".\"o_shippriority\",\n+ \"t7\".\"o_comment\",\n+ \"t8\".\"l_orderkey\",\n+ \"t8\".\"l_partkey\",\n+ \"t8\".\"l_suppkey\",\n+ \"t8\".\"l_linenumber\",\n+ \"t8\".\"l_quantity\",\n+ \"t8\".\"l_extendedprice\",\n+ \"t8\".\"l_discount\",\n+ \"t8\".\"l_tax\",\n+ \"t8\".\"l_returnflag\",\n+ \"t8\".\"l_linestatus\",\n+ \"t8\".\"l_shipdate\",\n+ \"t8\".\"l_commitdate\",\n+ \"t8\".\"l_receiptdate\",\n+ \"t8\".\"l_shipinstruct\",\n+ \"t8\".\"l_shipmode\",\n+ \"t8\".\"l_comment\",\n+ \"t9\".\"s_suppkey\",\n+ \"t9\".\"s_name\",\n+ \"t9\".\"s_address\",\n+ \"t9\".\"s_nationkey\",\n+ \"t9\".\"s_phone\",\n+ \"t9\".\"s_acctbal\",\n+ \"t9\".\"s_comment\",\n+ \"t10\".\"n_nationkey\",\n+ \"t10\".\"n_name\",\n+ \"t10\".\"n_regionkey\",\n+ \"t10\".\"n_comment\",\n+ \"t11\".\"r_regionkey\",\n+ \"t11\".\"r_name\",\n+ \"t11\".\"r_comment\"\n+ FROM \"customer\" AS \"t6\"\n+ INNER JOIN \"orders\" AS \"t7\"\n+ ON \"t6\".\"c_custkey\" = \"t7\".\"o_custkey\"\n+ INNER JOIN \"lineitem\" AS \"t8\"\n+ ON \"t8\".\"l_orderkey\" = \"t7\".\"o_orderkey\"\n+ INNER JOIN \"supplier\" AS \"t9\"\n+ ON \"t8\".\"l_suppkey\" = \"t9\".\"s_suppkey\"\n+ INNER JOIN \"nation\" AS \"t10\"\n+ ON \"t6\".\"c_nationkey\" = \"t9\".\"s_nationkey\" AND \"t9\".\"s_nationkey\" = \"t10\".\"n_nationkey\"\n+ INNER JOIN \"region\" AS \"t11\"\n+ ON \"t10\".\"n_regionkey\" = \"t11\".\"r_regionkey\"\n+ ) AS \"t12\"\n WHERE\n- t12.r_name = 'ASIA'\n- AND t12.o_orderdate >= MAKE_DATE(1994, 1, 1)\n- AND t12.o_orderdate < MAKE_DATE(1995, 1, 1)\n- ) AS t13\n+ \"t12\".\"r_name\" = 'ASIA'\n+ AND \"t12\".\"o_orderdate\" >= MAKE_DATE(1994, 1, 1)\n+ AND \"t12\".\"o_orderdate\" < MAKE_DATE(1995, 1, 1)\n+ ) AS \"t13\"\n GROUP BY\n 1\n-) AS t14\n+) AS \"t14\"\n ORDER BY\n- t14.revenue DESC\n\\ No newline at end of file\n+ \"t14\".\"revenue\" DESC\n\\ No newline at end of file\n", "h06.sql": "@@ -1,27 +1,27 @@\n SELECT\n- SUM(t1.l_extendedprice * 
t1.l_discount) AS revenue\n+ SUM(\"t1\".\"l_extendedprice\" * \"t1\".\"l_discount\") AS \"revenue\"\n FROM (\n SELECT\n- t0.l_orderkey,\n- t0.l_partkey,\n- t0.l_suppkey,\n- t0.l_linenumber,\n- t0.l_quantity,\n- t0.l_extendedprice,\n- t0.l_discount,\n- t0.l_tax,\n- t0.l_returnflag,\n- t0.l_linestatus,\n- t0.l_shipdate,\n- t0.l_commitdate,\n- t0.l_receiptdate,\n- t0.l_shipinstruct,\n- t0.l_shipmode,\n- t0.l_comment\n- FROM lineitem AS t0\n+ \"t0\".\"l_orderkey\",\n+ \"t0\".\"l_partkey\",\n+ \"t0\".\"l_suppkey\",\n+ \"t0\".\"l_linenumber\",\n+ \"t0\".\"l_quantity\",\n+ \"t0\".\"l_extendedprice\",\n+ \"t0\".\"l_discount\",\n+ \"t0\".\"l_tax\",\n+ \"t0\".\"l_returnflag\",\n+ \"t0\".\"l_linestatus\",\n+ \"t0\".\"l_shipdate\",\n+ \"t0\".\"l_commitdate\",\n+ \"t0\".\"l_receiptdate\",\n+ \"t0\".\"l_shipinstruct\",\n+ \"t0\".\"l_shipmode\",\n+ \"t0\".\"l_comment\"\n+ FROM \"lineitem\" AS \"t0\"\n WHERE\n- t0.l_shipdate >= MAKE_DATE(1994, 1, 1)\n- AND t0.l_shipdate < MAKE_DATE(1995, 1, 1)\n- AND t0.l_discount BETWEEN CAST(0.05 AS DOUBLE) AND CAST(0.07 AS DOUBLE)\n- AND t0.l_quantity < CAST(24 AS TINYINT)\n-) AS t1\n\\ No newline at end of file\n+ \"t0\".\"l_shipdate\" >= MAKE_DATE(1994, 1, 1)\n+ AND \"t0\".\"l_shipdate\" < MAKE_DATE(1995, 1, 1)\n+ AND \"t0\".\"l_discount\" BETWEEN CAST(0.05 AS DOUBLE) AND CAST(0.07 AS DOUBLE)\n+ AND \"t0\".\"l_quantity\" < CAST(24 AS TINYINT)\n+) AS \"t1\"\n\\ No newline at end of file\n", "h07.sql": "@@ -1,71 +1,71 @@\n SELECT\n- t14.supp_nation,\n- t14.cust_nation,\n- t14.l_year,\n- t14.revenue\n+ \"t14\".\"supp_nation\",\n+ \"t14\".\"cust_nation\",\n+ \"t14\".\"l_year\",\n+ \"t14\".\"revenue\"\n FROM (\n SELECT\n- t13.supp_nation,\n- t13.cust_nation,\n- t13.l_year,\n- SUM(t13.volume) AS revenue\n+ \"t13\".\"supp_nation\",\n+ \"t13\".\"cust_nation\",\n+ \"t13\".\"l_year\",\n+ SUM(\"t13\".\"volume\") AS \"revenue\"\n FROM (\n SELECT\n- t12.supp_nation,\n- t12.cust_nation,\n- t12.l_shipdate,\n- t12.l_extendedprice,\n- t12.l_discount,\n- t12.l_year,\n- t12.volume\n+ \"t12\".\"supp_nation\",\n+ \"t12\".\"cust_nation\",\n+ \"t12\".\"l_shipdate\",\n+ \"t12\".\"l_extendedprice\",\n+ \"t12\".\"l_discount\",\n+ \"t12\".\"l_year\",\n+ \"t12\".\"volume\"\n FROM (\n SELECT\n- t9.n_name AS supp_nation,\n- t11.n_name AS cust_nation,\n- t6.l_shipdate,\n- t6.l_extendedprice,\n- t6.l_discount,\n- EXTRACT(year FROM t6.l_shipdate) AS l_year,\n- t6.l_extendedprice * (\n- CAST(1 AS TINYINT) - t6.l_discount\n- ) AS volume\n- FROM supplier AS t5\n- INNER JOIN lineitem AS t6\n- ON t5.s_suppkey = t6.l_suppkey\n- INNER JOIN orders AS t7\n- ON t7.o_orderkey = t6.l_orderkey\n- INNER JOIN customer AS t8\n- ON t8.c_custkey = t7.o_custkey\n- INNER JOIN nation AS t9\n- ON t5.s_nationkey = t9.n_nationkey\n- INNER JOIN nation AS t11\n- ON t8.c_nationkey = t11.n_nationkey\n- ) AS t12\n+ \"t9\".\"n_name\" AS \"supp_nation\",\n+ \"t11\".\"n_name\" AS \"cust_nation\",\n+ \"t6\".\"l_shipdate\",\n+ \"t6\".\"l_extendedprice\",\n+ \"t6\".\"l_discount\",\n+ EXTRACT(year FROM \"t6\".\"l_shipdate\") AS \"l_year\",\n+ \"t6\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t6\".\"l_discount\"\n+ ) AS \"volume\"\n+ FROM \"supplier\" AS \"t5\"\n+ INNER JOIN \"lineitem\" AS \"t6\"\n+ ON \"t5\".\"s_suppkey\" = \"t6\".\"l_suppkey\"\n+ INNER JOIN \"orders\" AS \"t7\"\n+ ON \"t7\".\"o_orderkey\" = \"t6\".\"l_orderkey\"\n+ INNER JOIN \"customer\" AS \"t8\"\n+ ON \"t8\".\"c_custkey\" = \"t7\".\"o_custkey\"\n+ INNER JOIN \"nation\" AS \"t9\"\n+ ON \"t5\".\"s_nationkey\" = \"t9\".\"n_nationkey\"\n+ INNER JOIN \"nation\" 
AS \"t11\"\n+ ON \"t8\".\"c_nationkey\" = \"t11\".\"n_nationkey\"\n+ ) AS \"t12\"\n WHERE\n (\n (\n (\n- t12.cust_nation = 'FRANCE'\n+ \"t12\".\"cust_nation\" = 'FRANCE'\n ) AND (\n- t12.supp_nation = 'GERMANY'\n+ \"t12\".\"supp_nation\" = 'GERMANY'\n )\n )\n OR (\n (\n- t12.cust_nation = 'GERMANY'\n+ \"t12\".\"cust_nation\" = 'GERMANY'\n ) AND (\n- t12.supp_nation = 'FRANCE'\n+ \"t12\".\"supp_nation\" = 'FRANCE'\n )\n )\n )\n- AND t12.l_shipdate BETWEEN MAKE_DATE(1995, 1, 1) AND MAKE_DATE(1996, 12, 31)\n- ) AS t13\n+ AND \"t12\".\"l_shipdate\" BETWEEN MAKE_DATE(1995, 1, 1) AND MAKE_DATE(1996, 12, 31)\n+ ) AS \"t13\"\n GROUP BY\n 1,\n 2,\n 3\n-) AS t14\n+) AS \"t14\"\n ORDER BY\n- t14.supp_nation ASC,\n- t14.cust_nation ASC,\n- t14.l_year ASC\n\\ No newline at end of file\n+ \"t14\".\"supp_nation\" ASC,\n+ \"t14\".\"cust_nation\" ASC,\n+ \"t14\".\"l_year\" ASC\n\\ No newline at end of file\n", "h08.sql": "@@ -1,52 +1,52 @@\n SELECT\n- t18.o_year,\n- t18.mkt_share\n+ \"t18\".\"o_year\",\n+ \"t18\".\"mkt_share\"\n FROM (\n SELECT\n- t17.o_year,\n- SUM(t17.nation_volume) / SUM(t17.volume) AS mkt_share\n+ \"t17\".\"o_year\",\n+ SUM(\"t17\".\"nation_volume\") / SUM(\"t17\".\"volume\") AS \"mkt_share\"\n FROM (\n SELECT\n- t16.o_year,\n- t16.volume,\n- t16.nation,\n- t16.r_name,\n- t16.o_orderdate,\n- t16.p_type,\n- CASE WHEN t16.nation = 'BRAZIL' THEN t16.volume ELSE CAST(0 AS TINYINT) END AS nation_volume\n+ \"t16\".\"o_year\",\n+ \"t16\".\"volume\",\n+ \"t16\".\"nation\",\n+ \"t16\".\"r_name\",\n+ \"t16\".\"o_orderdate\",\n+ \"t16\".\"p_type\",\n+ CASE WHEN \"t16\".\"nation\" = 'BRAZIL' THEN \"t16\".\"volume\" ELSE CAST(0 AS TINYINT) END AS \"nation_volume\"\n FROM (\n SELECT\n- EXTRACT(year FROM t10.o_orderdate) AS o_year,\n- t8.l_extendedprice * (\n- CAST(1 AS TINYINT) - t8.l_discount\n- ) AS volume,\n- t15.n_name AS nation,\n- t14.r_name,\n- t10.o_orderdate,\n- t7.p_type\n- FROM part AS t7\n- INNER JOIN lineitem AS t8\n- ON t7.p_partkey = t8.l_partkey\n- INNER JOIN supplier AS t9\n- ON t9.s_suppkey = t8.l_suppkey\n- INNER JOIN orders AS t10\n- ON t8.l_orderkey = t10.o_orderkey\n- INNER JOIN customer AS t11\n- ON t10.o_custkey = t11.c_custkey\n- INNER JOIN nation AS t12\n- ON t11.c_nationkey = t12.n_nationkey\n- INNER JOIN region AS t14\n- ON t12.n_regionkey = t14.r_regionkey\n- INNER JOIN nation AS t15\n- ON t9.s_nationkey = t15.n_nationkey\n- ) AS t16\n+ EXTRACT(year FROM \"t10\".\"o_orderdate\") AS \"o_year\",\n+ \"t8\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t8\".\"l_discount\"\n+ ) AS \"volume\",\n+ \"t15\".\"n_name\" AS \"nation\",\n+ \"t14\".\"r_name\",\n+ \"t10\".\"o_orderdate\",\n+ \"t7\".\"p_type\"\n+ FROM \"part\" AS \"t7\"\n+ INNER JOIN \"lineitem\" AS \"t8\"\n+ ON \"t7\".\"p_partkey\" = \"t8\".\"l_partkey\"\n+ INNER JOIN \"supplier\" AS \"t9\"\n+ ON \"t9\".\"s_suppkey\" = \"t8\".\"l_suppkey\"\n+ INNER JOIN \"orders\" AS \"t10\"\n+ ON \"t8\".\"l_orderkey\" = \"t10\".\"o_orderkey\"\n+ INNER JOIN \"customer\" AS \"t11\"\n+ ON \"t10\".\"o_custkey\" = \"t11\".\"c_custkey\"\n+ INNER JOIN \"nation\" AS \"t12\"\n+ ON \"t11\".\"c_nationkey\" = \"t12\".\"n_nationkey\"\n+ INNER JOIN \"region\" AS \"t14\"\n+ ON \"t12\".\"n_regionkey\" = \"t14\".\"r_regionkey\"\n+ INNER JOIN \"nation\" AS \"t15\"\n+ ON \"t9\".\"s_nationkey\" = \"t15\".\"n_nationkey\"\n+ ) AS \"t16\"\n WHERE\n- t16.r_name = 'AMERICA'\n- AND t16.o_orderdate BETWEEN MAKE_DATE(1995, 1, 1) AND MAKE_DATE(1996, 12, 31)\n- AND t16.p_type = 'ECONOMY ANODIZED STEEL'\n- ) AS t17\n+ \"t16\".\"r_name\" = 'AMERICA'\n+ AND 
\"t16\".\"o_orderdate\" BETWEEN MAKE_DATE(1995, 1, 1) AND MAKE_DATE(1996, 12, 31)\n+ AND \"t16\".\"p_type\" = 'ECONOMY ANODIZED STEEL'\n+ ) AS \"t17\"\n GROUP BY\n 1\n-) AS t18\n+) AS \"t18\"\n ORDER BY\n- t18.o_year ASC\n\\ No newline at end of file\n+ \"t18\".\"o_year\" ASC\n\\ No newline at end of file\n", "h09.sql": "@@ -1,49 +1,49 @@\n SELECT\n- t14.nation,\n- t14.o_year,\n- t14.sum_profit\n+ \"t14\".\"nation\",\n+ \"t14\".\"o_year\",\n+ \"t14\".\"sum_profit\"\n FROM (\n SELECT\n- t13.nation,\n- t13.o_year,\n- SUM(t13.amount) AS sum_profit\n+ \"t13\".\"nation\",\n+ \"t13\".\"o_year\",\n+ SUM(\"t13\".\"amount\") AS \"sum_profit\"\n FROM (\n SELECT\n- t12.amount,\n- t12.o_year,\n- t12.nation,\n- t12.p_name\n+ \"t12\".\"amount\",\n+ \"t12\".\"o_year\",\n+ \"t12\".\"nation\",\n+ \"t12\".\"p_name\"\n FROM (\n SELECT\n (\n- t6.l_extendedprice * (\n- CAST(1 AS TINYINT) - t6.l_discount\n+ \"t6\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t6\".\"l_discount\"\n )\n ) - (\n- t8.ps_supplycost * t6.l_quantity\n- ) AS amount,\n- EXTRACT(year FROM t10.o_orderdate) AS o_year,\n- t11.n_name AS nation,\n- t9.p_name\n- FROM lineitem AS t6\n- INNER JOIN supplier AS t7\n- ON t7.s_suppkey = t6.l_suppkey\n- INNER JOIN partsupp AS t8\n- ON t8.ps_suppkey = t6.l_suppkey AND t8.ps_partkey = t6.l_partkey\n- INNER JOIN part AS t9\n- ON t9.p_partkey = t6.l_partkey\n- INNER JOIN orders AS t10\n- ON t10.o_orderkey = t6.l_orderkey\n- INNER JOIN nation AS t11\n- ON t7.s_nationkey = t11.n_nationkey\n- ) AS t12\n+ \"t8\".\"ps_supplycost\" * \"t6\".\"l_quantity\"\n+ ) AS \"amount\",\n+ EXTRACT(year FROM \"t10\".\"o_orderdate\") AS \"o_year\",\n+ \"t11\".\"n_name\" AS \"nation\",\n+ \"t9\".\"p_name\"\n+ FROM \"lineitem\" AS \"t6\"\n+ INNER JOIN \"supplier\" AS \"t7\"\n+ ON \"t7\".\"s_suppkey\" = \"t6\".\"l_suppkey\"\n+ INNER JOIN \"partsupp\" AS \"t8\"\n+ ON \"t8\".\"ps_suppkey\" = \"t6\".\"l_suppkey\" AND \"t8\".\"ps_partkey\" = \"t6\".\"l_partkey\"\n+ INNER JOIN \"part\" AS \"t9\"\n+ ON \"t9\".\"p_partkey\" = \"t6\".\"l_partkey\"\n+ INNER JOIN \"orders\" AS \"t10\"\n+ ON \"t10\".\"o_orderkey\" = \"t6\".\"l_orderkey\"\n+ INNER JOIN \"nation\" AS \"t11\"\n+ ON \"t7\".\"s_nationkey\" = \"t11\".\"n_nationkey\"\n+ ) AS \"t12\"\n WHERE\n- t12.p_name LIKE '%green%'\n- ) AS t13\n+ \"t12\".\"p_name\" LIKE '%green%'\n+ ) AS \"t13\"\n GROUP BY\n 1,\n 2\n-) AS t14\n+) AS \"t14\"\n ORDER BY\n- t14.nation ASC,\n- t14.o_year DESC\n\\ No newline at end of file\n+ \"t14\".\"nation\" ASC,\n+ \"t14\".\"o_year\" DESC\n\\ No newline at end of file\n", "h10.sql": "@@ -1,115 +1,115 @@\n SELECT\n- t10.c_custkey,\n- t10.c_name,\n- t10.revenue,\n- t10.c_acctbal,\n- t10.n_name,\n- t10.c_address,\n- t10.c_phone,\n- t10.c_comment\n+ \"t10\".\"c_custkey\",\n+ \"t10\".\"c_name\",\n+ \"t10\".\"revenue\",\n+ \"t10\".\"c_acctbal\",\n+ \"t10\".\"n_name\",\n+ \"t10\".\"c_address\",\n+ \"t10\".\"c_phone\",\n+ \"t10\".\"c_comment\"\n FROM (\n SELECT\n- t9.c_custkey,\n- t9.c_name,\n- t9.c_acctbal,\n- t9.n_name,\n- t9.c_address,\n- t9.c_phone,\n- t9.c_comment,\n- SUM(t9.l_extendedprice * (\n- CAST(1 AS TINYINT) - t9.l_discount\n- )) AS revenue\n+ \"t9\".\"c_custkey\",\n+ \"t9\".\"c_name\",\n+ \"t9\".\"c_acctbal\",\n+ \"t9\".\"n_name\",\n+ \"t9\".\"c_address\",\n+ \"t9\".\"c_phone\",\n+ \"t9\".\"c_comment\",\n+ SUM(\"t9\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t9\".\"l_discount\"\n+ )) AS \"revenue\"\n FROM (\n SELECT\n- t8.c_custkey,\n- t8.c_name,\n- t8.c_address,\n- t8.c_nationkey,\n- t8.c_phone,\n- t8.c_acctbal,\n- t8.c_mktsegment,\n- 
t8.c_comment,\n- t8.o_orderkey,\n- t8.o_custkey,\n- t8.o_orderstatus,\n- t8.o_totalprice,\n- t8.o_orderdate,\n- t8.o_orderpriority,\n- t8.o_clerk,\n- t8.o_shippriority,\n- t8.o_comment,\n- t8.l_orderkey,\n- t8.l_partkey,\n- t8.l_suppkey,\n- t8.l_linenumber,\n- t8.l_quantity,\n- t8.l_extendedprice,\n- t8.l_discount,\n- t8.l_tax,\n- t8.l_returnflag,\n- t8.l_linestatus,\n- t8.l_shipdate,\n- t8.l_commitdate,\n- t8.l_receiptdate,\n- t8.l_shipinstruct,\n- t8.l_shipmode,\n- t8.l_comment,\n- t8.n_nationkey,\n- t8.n_name,\n- t8.n_regionkey,\n- t8.n_comment\n+ \"t8\".\"c_custkey\",\n+ \"t8\".\"c_name\",\n+ \"t8\".\"c_address\",\n+ \"t8\".\"c_nationkey\",\n+ \"t8\".\"c_phone\",\n+ \"t8\".\"c_acctbal\",\n+ \"t8\".\"c_mktsegment\",\n+ \"t8\".\"c_comment\",\n+ \"t8\".\"o_orderkey\",\n+ \"t8\".\"o_custkey\",\n+ \"t8\".\"o_orderstatus\",\n+ \"t8\".\"o_totalprice\",\n+ \"t8\".\"o_orderdate\",\n+ \"t8\".\"o_orderpriority\",\n+ \"t8\".\"o_clerk\",\n+ \"t8\".\"o_shippriority\",\n+ \"t8\".\"o_comment\",\n+ \"t8\".\"l_orderkey\",\n+ \"t8\".\"l_partkey\",\n+ \"t8\".\"l_suppkey\",\n+ \"t8\".\"l_linenumber\",\n+ \"t8\".\"l_quantity\",\n+ \"t8\".\"l_extendedprice\",\n+ \"t8\".\"l_discount\",\n+ \"t8\".\"l_tax\",\n+ \"t8\".\"l_returnflag\",\n+ \"t8\".\"l_linestatus\",\n+ \"t8\".\"l_shipdate\",\n+ \"t8\".\"l_commitdate\",\n+ \"t8\".\"l_receiptdate\",\n+ \"t8\".\"l_shipinstruct\",\n+ \"t8\".\"l_shipmode\",\n+ \"t8\".\"l_comment\",\n+ \"t8\".\"n_nationkey\",\n+ \"t8\".\"n_name\",\n+ \"t8\".\"n_regionkey\",\n+ \"t8\".\"n_comment\"\n FROM (\n SELECT\n- t4.c_custkey,\n- t4.c_name,\n- t4.c_address,\n- t4.c_nationkey,\n- t4.c_phone,\n- t4.c_acctbal,\n- t4.c_mktsegment,\n- t4.c_comment,\n- t5.o_orderkey,\n- t5.o_custkey,\n- t5.o_orderstatus,\n- t5.o_totalprice,\n- t5.o_orderdate,\n- t5.o_orderpriority,\n- t5.o_clerk,\n- t5.o_shippriority,\n- t5.o_comment,\n- t6.l_orderkey,\n- t6.l_partkey,\n- t6.l_suppkey,\n- t6.l_linenumber,\n- t6.l_quantity,\n- t6.l_extendedprice,\n- t6.l_discount,\n- t6.l_tax,\n- t6.l_returnflag,\n- t6.l_linestatus,\n- t6.l_shipdate,\n- t6.l_commitdate,\n- t6.l_receiptdate,\n- t6.l_shipinstruct,\n- t6.l_shipmode,\n- t6.l_comment,\n- t7.n_nationkey,\n- t7.n_name,\n- t7.n_regionkey,\n- t7.n_comment\n- FROM customer AS t4\n- INNER JOIN orders AS t5\n- ON t4.c_custkey = t5.o_custkey\n- INNER JOIN lineitem AS t6\n- ON t6.l_orderkey = t5.o_orderkey\n- INNER JOIN nation AS t7\n- ON t4.c_nationkey = t7.n_nationkey\n- ) AS t8\n+ \"t4\".\"c_custkey\",\n+ \"t4\".\"c_name\",\n+ \"t4\".\"c_address\",\n+ \"t4\".\"c_nationkey\",\n+ \"t4\".\"c_phone\",\n+ \"t4\".\"c_acctbal\",\n+ \"t4\".\"c_mktsegment\",\n+ \"t4\".\"c_comment\",\n+ \"t5\".\"o_orderkey\",\n+ \"t5\".\"o_custkey\",\n+ \"t5\".\"o_orderstatus\",\n+ \"t5\".\"o_totalprice\",\n+ \"t5\".\"o_orderdate\",\n+ \"t5\".\"o_orderpriority\",\n+ \"t5\".\"o_clerk\",\n+ \"t5\".\"o_shippriority\",\n+ \"t5\".\"o_comment\",\n+ \"t6\".\"l_orderkey\",\n+ \"t6\".\"l_partkey\",\n+ \"t6\".\"l_suppkey\",\n+ \"t6\".\"l_linenumber\",\n+ \"t6\".\"l_quantity\",\n+ \"t6\".\"l_extendedprice\",\n+ \"t6\".\"l_discount\",\n+ \"t6\".\"l_tax\",\n+ \"t6\".\"l_returnflag\",\n+ \"t6\".\"l_linestatus\",\n+ \"t6\".\"l_shipdate\",\n+ \"t6\".\"l_commitdate\",\n+ \"t6\".\"l_receiptdate\",\n+ \"t6\".\"l_shipinstruct\",\n+ \"t6\".\"l_shipmode\",\n+ \"t6\".\"l_comment\",\n+ \"t7\".\"n_nationkey\",\n+ \"t7\".\"n_name\",\n+ \"t7\".\"n_regionkey\",\n+ \"t7\".\"n_comment\"\n+ FROM \"customer\" AS \"t4\"\n+ INNER JOIN \"orders\" AS \"t5\"\n+ ON \"t4\".\"c_custkey\" = \"t5\".\"o_custkey\"\n+ INNER JOIN 
\"lineitem\" AS \"t6\"\n+ ON \"t6\".\"l_orderkey\" = \"t5\".\"o_orderkey\"\n+ INNER JOIN \"nation\" AS \"t7\"\n+ ON \"t4\".\"c_nationkey\" = \"t7\".\"n_nationkey\"\n+ ) AS \"t8\"\n WHERE\n- t8.o_orderdate >= MAKE_DATE(1993, 10, 1)\n- AND t8.o_orderdate < MAKE_DATE(1994, 1, 1)\n- AND t8.l_returnflag = 'R'\n- ) AS t9\n+ \"t8\".\"o_orderdate\" >= MAKE_DATE(1993, 10, 1)\n+ AND \"t8\".\"o_orderdate\" < MAKE_DATE(1994, 1, 1)\n+ AND \"t8\".\"l_returnflag\" = 'R'\n+ ) AS \"t9\"\n GROUP BY\n 1,\n 2,\n@@ -118,7 +118,7 @@ FROM (\n 5,\n 6,\n 7\n-) AS t10\n+) AS \"t10\"\n ORDER BY\n- t10.revenue DESC\n+ \"t10\".\"revenue\" DESC\n LIMIT 20\n\\ No newline at end of file\n", "h11.sql": "@@ -1,109 +1,109 @@\n SELECT\n- t8.ps_partkey,\n- t8.value\n+ \"t8\".\"ps_partkey\",\n+ \"t8\".\"value\"\n FROM (\n SELECT\n- t7.ps_partkey,\n- SUM(t7.ps_supplycost * t7.ps_availqty) AS value\n+ \"t7\".\"ps_partkey\",\n+ SUM(\"t7\".\"ps_supplycost\" * \"t7\".\"ps_availqty\") AS \"value\"\n FROM (\n SELECT\n- t6.ps_partkey,\n- t6.ps_suppkey,\n- t6.ps_availqty,\n- t6.ps_supplycost,\n- t6.ps_comment,\n- t6.s_suppkey,\n- t6.s_name,\n- t6.s_address,\n- t6.s_nationkey,\n- t6.s_phone,\n- t6.s_acctbal,\n- t6.s_comment,\n- t6.n_nationkey,\n- t6.n_name,\n- t6.n_regionkey,\n- t6.n_comment\n+ \"t6\".\"ps_partkey\",\n+ \"t6\".\"ps_suppkey\",\n+ \"t6\".\"ps_availqty\",\n+ \"t6\".\"ps_supplycost\",\n+ \"t6\".\"ps_comment\",\n+ \"t6\".\"s_suppkey\",\n+ \"t6\".\"s_name\",\n+ \"t6\".\"s_address\",\n+ \"t6\".\"s_nationkey\",\n+ \"t6\".\"s_phone\",\n+ \"t6\".\"s_acctbal\",\n+ \"t6\".\"s_comment\",\n+ \"t6\".\"n_nationkey\",\n+ \"t6\".\"n_name\",\n+ \"t6\".\"n_regionkey\",\n+ \"t6\".\"n_comment\"\n FROM (\n SELECT\n- t3.ps_partkey,\n- t3.ps_suppkey,\n- t3.ps_availqty,\n- t3.ps_supplycost,\n- t3.ps_comment,\n- t4.s_suppkey,\n- t4.s_name,\n- t4.s_address,\n- t4.s_nationkey,\n- t4.s_phone,\n- t4.s_acctbal,\n- t4.s_comment,\n- t5.n_nationkey,\n- t5.n_name,\n- t5.n_regionkey,\n- t5.n_comment\n- FROM partsupp AS t3\n- INNER JOIN supplier AS t4\n- ON t3.ps_suppkey = t4.s_suppkey\n- INNER JOIN nation AS t5\n- ON t5.n_nationkey = t4.s_nationkey\n- ) AS t6\n+ \"t3\".\"ps_partkey\",\n+ \"t3\".\"ps_suppkey\",\n+ \"t3\".\"ps_availqty\",\n+ \"t3\".\"ps_supplycost\",\n+ \"t3\".\"ps_comment\",\n+ \"t4\".\"s_suppkey\",\n+ \"t4\".\"s_name\",\n+ \"t4\".\"s_address\",\n+ \"t4\".\"s_nationkey\",\n+ \"t4\".\"s_phone\",\n+ \"t4\".\"s_acctbal\",\n+ \"t4\".\"s_comment\",\n+ \"t5\".\"n_nationkey\",\n+ \"t5\".\"n_name\",\n+ \"t5\".\"n_regionkey\",\n+ \"t5\".\"n_comment\"\n+ FROM \"partsupp\" AS \"t3\"\n+ INNER JOIN \"supplier\" AS \"t4\"\n+ ON \"t3\".\"ps_suppkey\" = \"t4\".\"s_suppkey\"\n+ INNER JOIN \"nation\" AS \"t5\"\n+ ON \"t5\".\"n_nationkey\" = \"t4\".\"s_nationkey\"\n+ ) AS \"t6\"\n WHERE\n- t6.n_name = 'GERMANY'\n- ) AS t7\n+ \"t6\".\"n_name\" = 'GERMANY'\n+ ) AS \"t7\"\n GROUP BY\n 1\n-) AS t8\n+) AS \"t8\"\n WHERE\n- t8.value > (\n+ \"t8\".\"value\" > (\n (\n SELECT\n- SUM(t7.ps_supplycost * t7.ps_availqty) AS \"Sum(Multiply(ps_supplycost, ps_availqty))\"\n+ SUM(\"t7\".\"ps_supplycost\" * \"t7\".\"ps_availqty\") AS \"Sum(Multiply(ps_supplycost, ps_availqty))\"\n FROM (\n SELECT\n- t6.ps_partkey,\n- t6.ps_suppkey,\n- t6.ps_availqty,\n- t6.ps_supplycost,\n- t6.ps_comment,\n- t6.s_suppkey,\n- t6.s_name,\n- t6.s_address,\n- t6.s_nationkey,\n- t6.s_phone,\n- t6.s_acctbal,\n- t6.s_comment,\n- t6.n_nationkey,\n- t6.n_name,\n- t6.n_regionkey,\n- t6.n_comment\n+ \"t6\".\"ps_partkey\",\n+ \"t6\".\"ps_suppkey\",\n+ \"t6\".\"ps_availqty\",\n+ 
\"t6\".\"ps_supplycost\",\n+ \"t6\".\"ps_comment\",\n+ \"t6\".\"s_suppkey\",\n+ \"t6\".\"s_name\",\n+ \"t6\".\"s_address\",\n+ \"t6\".\"s_nationkey\",\n+ \"t6\".\"s_phone\",\n+ \"t6\".\"s_acctbal\",\n+ \"t6\".\"s_comment\",\n+ \"t6\".\"n_nationkey\",\n+ \"t6\".\"n_name\",\n+ \"t6\".\"n_regionkey\",\n+ \"t6\".\"n_comment\"\n FROM (\n SELECT\n- t3.ps_partkey,\n- t3.ps_suppkey,\n- t3.ps_availqty,\n- t3.ps_supplycost,\n- t3.ps_comment,\n- t4.s_suppkey,\n- t4.s_name,\n- t4.s_address,\n- t4.s_nationkey,\n- t4.s_phone,\n- t4.s_acctbal,\n- t4.s_comment,\n- t5.n_nationkey,\n- t5.n_name,\n- t5.n_regionkey,\n- t5.n_comment\n- FROM partsupp AS t3\n- INNER JOIN supplier AS t4\n- ON t3.ps_suppkey = t4.s_suppkey\n- INNER JOIN nation AS t5\n- ON t5.n_nationkey = t4.s_nationkey\n- ) AS t6\n+ \"t3\".\"ps_partkey\",\n+ \"t3\".\"ps_suppkey\",\n+ \"t3\".\"ps_availqty\",\n+ \"t3\".\"ps_supplycost\",\n+ \"t3\".\"ps_comment\",\n+ \"t4\".\"s_suppkey\",\n+ \"t4\".\"s_name\",\n+ \"t4\".\"s_address\",\n+ \"t4\".\"s_nationkey\",\n+ \"t4\".\"s_phone\",\n+ \"t4\".\"s_acctbal\",\n+ \"t4\".\"s_comment\",\n+ \"t5\".\"n_nationkey\",\n+ \"t5\".\"n_name\",\n+ \"t5\".\"n_regionkey\",\n+ \"t5\".\"n_comment\"\n+ FROM \"partsupp\" AS \"t3\"\n+ INNER JOIN \"supplier\" AS \"t4\"\n+ ON \"t3\".\"ps_suppkey\" = \"t4\".\"s_suppkey\"\n+ INNER JOIN \"nation\" AS \"t5\"\n+ ON \"t5\".\"n_nationkey\" = \"t4\".\"s_nationkey\"\n+ ) AS \"t6\"\n WHERE\n- t6.n_name = 'GERMANY'\n- ) AS t7\n+ \"t6\".\"n_name\" = 'GERMANY'\n+ ) AS \"t7\"\n ) * CAST(0.0001 AS DOUBLE)\n )\n ORDER BY\n- t8.value DESC\n\\ No newline at end of file\n+ \"t8\".\"value\" DESC\n\\ No newline at end of file\n", "h12.sql": "@@ -1,95 +1,95 @@\n SELECT\n- t6.l_shipmode,\n- t6.high_line_count,\n- t6.low_line_count\n+ \"t6\".\"l_shipmode\",\n+ \"t6\".\"high_line_count\",\n+ \"t6\".\"low_line_count\"\n FROM (\n SELECT\n- t5.l_shipmode,\n+ \"t5\".\"l_shipmode\",\n SUM(\n- CASE t5.o_orderpriority\n+ CASE \"t5\".\"o_orderpriority\"\n WHEN '1-URGENT'\n THEN CAST(1 AS TINYINT)\n WHEN '2-HIGH'\n THEN CAST(1 AS TINYINT)\n ELSE CAST(0 AS TINYINT)\n END\n- ) AS high_line_count,\n+ ) AS \"high_line_count\",\n SUM(\n- CASE t5.o_orderpriority\n+ CASE \"t5\".\"o_orderpriority\"\n WHEN '1-URGENT'\n THEN CAST(0 AS TINYINT)\n WHEN '2-HIGH'\n THEN CAST(0 AS TINYINT)\n ELSE CAST(1 AS TINYINT)\n END\n- ) AS low_line_count\n+ ) AS \"low_line_count\"\n FROM (\n SELECT\n- t4.o_orderkey,\n- t4.o_custkey,\n- t4.o_orderstatus,\n- t4.o_totalprice,\n- t4.o_orderdate,\n- t4.o_orderpriority,\n- t4.o_clerk,\n- t4.o_shippriority,\n- t4.o_comment,\n- t4.l_orderkey,\n- t4.l_partkey,\n- t4.l_suppkey,\n- t4.l_linenumber,\n- t4.l_quantity,\n- t4.l_extendedprice,\n- t4.l_discount,\n- t4.l_tax,\n- t4.l_returnflag,\n- t4.l_linestatus,\n- t4.l_shipdate,\n- t4.l_commitdate,\n- t4.l_receiptdate,\n- t4.l_shipinstruct,\n- t4.l_shipmode,\n- t4.l_comment\n+ \"t4\".\"o_orderkey\",\n+ \"t4\".\"o_custkey\",\n+ \"t4\".\"o_orderstatus\",\n+ \"t4\".\"o_totalprice\",\n+ \"t4\".\"o_orderdate\",\n+ \"t4\".\"o_orderpriority\",\n+ \"t4\".\"o_clerk\",\n+ \"t4\".\"o_shippriority\",\n+ \"t4\".\"o_comment\",\n+ \"t4\".\"l_orderkey\",\n+ \"t4\".\"l_partkey\",\n+ \"t4\".\"l_suppkey\",\n+ \"t4\".\"l_linenumber\",\n+ \"t4\".\"l_quantity\",\n+ \"t4\".\"l_extendedprice\",\n+ \"t4\".\"l_discount\",\n+ \"t4\".\"l_tax\",\n+ \"t4\".\"l_returnflag\",\n+ \"t4\".\"l_linestatus\",\n+ \"t4\".\"l_shipdate\",\n+ \"t4\".\"l_commitdate\",\n+ \"t4\".\"l_receiptdate\",\n+ \"t4\".\"l_shipinstruct\",\n+ \"t4\".\"l_shipmode\",\n+ \"t4\".\"l_comment\"\n FROM (\n 
SELECT\n- t2.o_orderkey,\n- t2.o_custkey,\n- t2.o_orderstatus,\n- t2.o_totalprice,\n- t2.o_orderdate,\n- t2.o_orderpriority,\n- t2.o_clerk,\n- t2.o_shippriority,\n- t2.o_comment,\n- t3.l_orderkey,\n- t3.l_partkey,\n- t3.l_suppkey,\n- t3.l_linenumber,\n- t3.l_quantity,\n- t3.l_extendedprice,\n- t3.l_discount,\n- t3.l_tax,\n- t3.l_returnflag,\n- t3.l_linestatus,\n- t3.l_shipdate,\n- t3.l_commitdate,\n- t3.l_receiptdate,\n- t3.l_shipinstruct,\n- t3.l_shipmode,\n- t3.l_comment\n- FROM orders AS t2\n- INNER JOIN lineitem AS t3\n- ON t2.o_orderkey = t3.l_orderkey\n- ) AS t4\n+ \"t2\".\"o_orderkey\",\n+ \"t2\".\"o_custkey\",\n+ \"t2\".\"o_orderstatus\",\n+ \"t2\".\"o_totalprice\",\n+ \"t2\".\"o_orderdate\",\n+ \"t2\".\"o_orderpriority\",\n+ \"t2\".\"o_clerk\",\n+ \"t2\".\"o_shippriority\",\n+ \"t2\".\"o_comment\",\n+ \"t3\".\"l_orderkey\",\n+ \"t3\".\"l_partkey\",\n+ \"t3\".\"l_suppkey\",\n+ \"t3\".\"l_linenumber\",\n+ \"t3\".\"l_quantity\",\n+ \"t3\".\"l_extendedprice\",\n+ \"t3\".\"l_discount\",\n+ \"t3\".\"l_tax\",\n+ \"t3\".\"l_returnflag\",\n+ \"t3\".\"l_linestatus\",\n+ \"t3\".\"l_shipdate\",\n+ \"t3\".\"l_commitdate\",\n+ \"t3\".\"l_receiptdate\",\n+ \"t3\".\"l_shipinstruct\",\n+ \"t3\".\"l_shipmode\",\n+ \"t3\".\"l_comment\"\n+ FROM \"orders\" AS \"t2\"\n+ INNER JOIN \"lineitem\" AS \"t3\"\n+ ON \"t2\".\"o_orderkey\" = \"t3\".\"l_orderkey\"\n+ ) AS \"t4\"\n WHERE\n- t4.l_shipmode IN ('MAIL', 'SHIP')\n- AND t4.l_commitdate < t4.l_receiptdate\n- AND t4.l_shipdate < t4.l_commitdate\n- AND t4.l_receiptdate >= MAKE_DATE(1994, 1, 1)\n- AND t4.l_receiptdate < MAKE_DATE(1995, 1, 1)\n- ) AS t5\n+ \"t4\".\"l_shipmode\" IN ('MAIL', 'SHIP')\n+ AND \"t4\".\"l_commitdate\" < \"t4\".\"l_receiptdate\"\n+ AND \"t4\".\"l_shipdate\" < \"t4\".\"l_commitdate\"\n+ AND \"t4\".\"l_receiptdate\" >= MAKE_DATE(1994, 1, 1)\n+ AND \"t4\".\"l_receiptdate\" < MAKE_DATE(1995, 1, 1)\n+ ) AS \"t5\"\n GROUP BY\n 1\n-) AS t6\n+) AS \"t6\"\n ORDER BY\n- t6.l_shipmode ASC\n\\ No newline at end of file\n+ \"t6\".\"l_shipmode\" ASC\n\\ No newline at end of file\n", "h13.sql": "@@ -1,45 +1,46 @@\n SELECT\n- t6.c_count,\n- t6.custdist\n+ \"t6\".\"c_count\",\n+ \"t6\".\"custdist\"\n FROM (\n SELECT\n- t5.c_count,\n- COUNT(*) AS custdist\n+ \"t5\".\"c_count\",\n+ COUNT(*) AS \"custdist\"\n FROM (\n SELECT\n- t4.c_custkey,\n- COUNT(t4.o_orderkey) AS c_count\n+ \"t4\".\"c_custkey\",\n+ COUNT(\"t4\".\"o_orderkey\") AS \"c_count\"\n FROM (\n SELECT\n- t2.c_custkey,\n- t2.c_name,\n- t2.c_address,\n- t2.c_nationkey,\n- t2.c_phone,\n- t2.c_acctbal,\n- t2.c_mktsegment,\n- t2.c_comment,\n- t3.o_orderkey,\n- t3.o_custkey,\n- t3.o_orderstatus,\n- t3.o_totalprice,\n- t3.o_orderdate,\n- t3.o_orderpriority,\n- t3.o_clerk,\n- t3.o_shippriority,\n- t3.o_comment\n- FROM customer AS t2\n- LEFT OUTER JOIN orders AS t3\n- ON t2.c_custkey = t3.o_custkey AND NOT (\n- t3.o_comment LIKE '%special%requests%'\n+ \"t2\".\"c_custkey\",\n+ \"t2\".\"c_name\",\n+ \"t2\".\"c_address\",\n+ \"t2\".\"c_nationkey\",\n+ \"t2\".\"c_phone\",\n+ \"t2\".\"c_acctbal\",\n+ \"t2\".\"c_mktsegment\",\n+ \"t2\".\"c_comment\",\n+ \"t3\".\"o_orderkey\",\n+ \"t3\".\"o_custkey\",\n+ \"t3\".\"o_orderstatus\",\n+ \"t3\".\"o_totalprice\",\n+ \"t3\".\"o_orderdate\",\n+ \"t3\".\"o_orderpriority\",\n+ \"t3\".\"o_clerk\",\n+ \"t3\".\"o_shippriority\",\n+ \"t3\".\"o_comment\"\n+ FROM \"customer\" AS \"t2\"\n+ LEFT OUTER JOIN \"orders\" AS \"t3\"\n+ ON \"t2\".\"c_custkey\" = \"t3\".\"o_custkey\"\n+ AND NOT (\n+ \"t3\".\"o_comment\" LIKE '%special%requests%'\n )\n- ) AS t4\n+ ) AS \"t4\"\n 
GROUP BY\n 1\n- ) AS t5\n+ ) AS \"t5\"\n GROUP BY\n 1\n-) AS t6\n+) AS \"t6\"\n ORDER BY\n- t6.custdist DESC,\n- t6.c_count DESC\n\\ No newline at end of file\n+ \"t6\".\"custdist\" DESC,\n+ \"t6\".\"c_count\" DESC\n\\ No newline at end of file\n", "h14.sql": "@@ -2,74 +2,75 @@ SELECT\n (\n SUM(\n CASE\n- WHEN t5.p_type LIKE 'PROMO%'\n- THEN t5.l_extendedprice * (\n- CAST(1 AS TINYINT) - t5.l_discount\n+ WHEN \"t5\".\"p_type\" LIKE 'PROMO%'\n+ THEN \"t5\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t5\".\"l_discount\"\n )\n ELSE CAST(0 AS TINYINT)\n END\n ) * CAST(100 AS TINYINT)\n- ) / SUM(t5.l_extendedprice * (\n- CAST(1 AS TINYINT) - t5.l_discount\n- )) AS promo_revenue\n+ ) / SUM(\"t5\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t5\".\"l_discount\"\n+ )) AS \"promo_revenue\"\n FROM (\n SELECT\n- t4.l_orderkey,\n- t4.l_partkey,\n- t4.l_suppkey,\n- t4.l_linenumber,\n- t4.l_quantity,\n- t4.l_extendedprice,\n- t4.l_discount,\n- t4.l_tax,\n- t4.l_returnflag,\n- t4.l_linestatus,\n- t4.l_shipdate,\n- t4.l_commitdate,\n- t4.l_receiptdate,\n- t4.l_shipinstruct,\n- t4.l_shipmode,\n- t4.l_comment,\n- t4.p_partkey,\n- t4.p_name,\n- t4.p_mfgr,\n- t4.p_brand,\n- t4.p_type,\n- t4.p_size,\n- t4.p_container,\n- t4.p_retailprice,\n- t4.p_comment\n+ \"t4\".\"l_orderkey\",\n+ \"t4\".\"l_partkey\",\n+ \"t4\".\"l_suppkey\",\n+ \"t4\".\"l_linenumber\",\n+ \"t4\".\"l_quantity\",\n+ \"t4\".\"l_extendedprice\",\n+ \"t4\".\"l_discount\",\n+ \"t4\".\"l_tax\",\n+ \"t4\".\"l_returnflag\",\n+ \"t4\".\"l_linestatus\",\n+ \"t4\".\"l_shipdate\",\n+ \"t4\".\"l_commitdate\",\n+ \"t4\".\"l_receiptdate\",\n+ \"t4\".\"l_shipinstruct\",\n+ \"t4\".\"l_shipmode\",\n+ \"t4\".\"l_comment\",\n+ \"t4\".\"p_partkey\",\n+ \"t4\".\"p_name\",\n+ \"t4\".\"p_mfgr\",\n+ \"t4\".\"p_brand\",\n+ \"t4\".\"p_type\",\n+ \"t4\".\"p_size\",\n+ \"t4\".\"p_container\",\n+ \"t4\".\"p_retailprice\",\n+ \"t4\".\"p_comment\"\n FROM (\n SELECT\n- t2.l_orderkey,\n- t2.l_partkey,\n- t2.l_suppkey,\n- t2.l_linenumber,\n- t2.l_quantity,\n- t2.l_extendedprice,\n- t2.l_discount,\n- t2.l_tax,\n- t2.l_returnflag,\n- t2.l_linestatus,\n- t2.l_shipdate,\n- t2.l_commitdate,\n- t2.l_receiptdate,\n- t2.l_shipinstruct,\n- t2.l_shipmode,\n- t2.l_comment,\n- t3.p_partkey,\n- t3.p_name,\n- t3.p_mfgr,\n- t3.p_brand,\n- t3.p_type,\n- t3.p_size,\n- t3.p_container,\n- t3.p_retailprice,\n- t3.p_comment\n- FROM lineitem AS t2\n- INNER JOIN part AS t3\n- ON t2.l_partkey = t3.p_partkey\n- ) AS t4\n+ \"t2\".\"l_orderkey\",\n+ \"t2\".\"l_partkey\",\n+ \"t2\".\"l_suppkey\",\n+ \"t2\".\"l_linenumber\",\n+ \"t2\".\"l_quantity\",\n+ \"t2\".\"l_extendedprice\",\n+ \"t2\".\"l_discount\",\n+ \"t2\".\"l_tax\",\n+ \"t2\".\"l_returnflag\",\n+ \"t2\".\"l_linestatus\",\n+ \"t2\".\"l_shipdate\",\n+ \"t2\".\"l_commitdate\",\n+ \"t2\".\"l_receiptdate\",\n+ \"t2\".\"l_shipinstruct\",\n+ \"t2\".\"l_shipmode\",\n+ \"t2\".\"l_comment\",\n+ \"t3\".\"p_partkey\",\n+ \"t3\".\"p_name\",\n+ \"t3\".\"p_mfgr\",\n+ \"t3\".\"p_brand\",\n+ \"t3\".\"p_type\",\n+ \"t3\".\"p_size\",\n+ \"t3\".\"p_container\",\n+ \"t3\".\"p_retailprice\",\n+ \"t3\".\"p_comment\"\n+ FROM \"lineitem\" AS \"t2\"\n+ INNER JOIN \"part\" AS \"t3\"\n+ ON \"t2\".\"l_partkey\" = \"t3\".\"p_partkey\"\n+ ) AS \"t4\"\n WHERE\n- t4.l_shipdate >= MAKE_DATE(1995, 9, 1) AND t4.l_shipdate < MAKE_DATE(1995, 10, 1)\n-) AS t5\n\\ No newline at end of file\n+ \"t4\".\"l_shipdate\" >= MAKE_DATE(1995, 9, 1)\n+ AND \"t4\".\"l_shipdate\" < MAKE_DATE(1995, 10, 1)\n+) AS \"t5\"\n\\ No newline at end of file\n", "h15.sql": "@@ -1,103 +1,105 
@@\n SELECT\n- t6.s_suppkey,\n- t6.s_name,\n- t6.s_address,\n- t6.s_phone,\n- t6.total_revenue\n+ \"t6\".\"s_suppkey\",\n+ \"t6\".\"s_name\",\n+ \"t6\".\"s_address\",\n+ \"t6\".\"s_phone\",\n+ \"t6\".\"total_revenue\"\n FROM (\n SELECT\n- t2.s_suppkey,\n- t2.s_name,\n- t2.s_address,\n- t2.s_nationkey,\n- t2.s_phone,\n- t2.s_acctbal,\n- t2.s_comment,\n- t5.l_suppkey,\n- t5.total_revenue\n- FROM supplier AS t2\n+ \"t2\".\"s_suppkey\",\n+ \"t2\".\"s_name\",\n+ \"t2\".\"s_address\",\n+ \"t2\".\"s_nationkey\",\n+ \"t2\".\"s_phone\",\n+ \"t2\".\"s_acctbal\",\n+ \"t2\".\"s_comment\",\n+ \"t5\".\"l_suppkey\",\n+ \"t5\".\"total_revenue\"\n+ FROM \"supplier\" AS \"t2\"\n INNER JOIN (\n SELECT\n- t3.l_suppkey,\n- SUM(t3.l_extendedprice * (\n- CAST(1 AS TINYINT) - t3.l_discount\n- )) AS total_revenue\n+ \"t3\".\"l_suppkey\",\n+ SUM(\"t3\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t3\".\"l_discount\"\n+ )) AS \"total_revenue\"\n FROM (\n SELECT\n- t1.l_orderkey,\n- t1.l_partkey,\n- t1.l_suppkey,\n- t1.l_linenumber,\n- t1.l_quantity,\n- t1.l_extendedprice,\n- t1.l_discount,\n- t1.l_tax,\n- t1.l_returnflag,\n- t1.l_linestatus,\n- t1.l_shipdate,\n- t1.l_commitdate,\n- t1.l_receiptdate,\n- t1.l_shipinstruct,\n- t1.l_shipmode,\n- t1.l_comment\n- FROM lineitem AS t1\n+ \"t1\".\"l_orderkey\",\n+ \"t1\".\"l_partkey\",\n+ \"t1\".\"l_suppkey\",\n+ \"t1\".\"l_linenumber\",\n+ \"t1\".\"l_quantity\",\n+ \"t1\".\"l_extendedprice\",\n+ \"t1\".\"l_discount\",\n+ \"t1\".\"l_tax\",\n+ \"t1\".\"l_returnflag\",\n+ \"t1\".\"l_linestatus\",\n+ \"t1\".\"l_shipdate\",\n+ \"t1\".\"l_commitdate\",\n+ \"t1\".\"l_receiptdate\",\n+ \"t1\".\"l_shipinstruct\",\n+ \"t1\".\"l_shipmode\",\n+ \"t1\".\"l_comment\"\n+ FROM \"lineitem\" AS \"t1\"\n WHERE\n- t1.l_shipdate >= MAKE_DATE(1996, 1, 1) AND t1.l_shipdate < MAKE_DATE(1996, 4, 1)\n- ) AS t3\n+ \"t1\".\"l_shipdate\" >= MAKE_DATE(1996, 1, 1)\n+ AND \"t1\".\"l_shipdate\" < MAKE_DATE(1996, 4, 1)\n+ ) AS \"t3\"\n GROUP BY\n 1\n- ) AS t5\n- ON t2.s_suppkey = t5.l_suppkey\n-) AS t6\n+ ) AS \"t5\"\n+ ON \"t2\".\"s_suppkey\" = \"t5\".\"l_suppkey\"\n+) AS \"t6\"\n WHERE\n- t6.total_revenue = (\n+ \"t6\".\"total_revenue\" = (\n SELECT\n- MAX(t6.total_revenue) AS \"Max(total_revenue)\"\n+ MAX(\"t6\".\"total_revenue\") AS \"Max(total_revenue)\"\n FROM (\n SELECT\n- t2.s_suppkey,\n- t2.s_name,\n- t2.s_address,\n- t2.s_nationkey,\n- t2.s_phone,\n- t2.s_acctbal,\n- t2.s_comment,\n- t5.l_suppkey,\n- t5.total_revenue\n- FROM supplier AS t2\n+ \"t2\".\"s_suppkey\",\n+ \"t2\".\"s_name\",\n+ \"t2\".\"s_address\",\n+ \"t2\".\"s_nationkey\",\n+ \"t2\".\"s_phone\",\n+ \"t2\".\"s_acctbal\",\n+ \"t2\".\"s_comment\",\n+ \"t5\".\"l_suppkey\",\n+ \"t5\".\"total_revenue\"\n+ FROM \"supplier\" AS \"t2\"\n INNER JOIN (\n SELECT\n- t3.l_suppkey,\n- SUM(t3.l_extendedprice * (\n- CAST(1 AS TINYINT) - t3.l_discount\n- )) AS total_revenue\n+ \"t3\".\"l_suppkey\",\n+ SUM(\"t3\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t3\".\"l_discount\"\n+ )) AS \"total_revenue\"\n FROM (\n SELECT\n- t1.l_orderkey,\n- t1.l_partkey,\n- t1.l_suppkey,\n- t1.l_linenumber,\n- t1.l_quantity,\n- t1.l_extendedprice,\n- t1.l_discount,\n- t1.l_tax,\n- t1.l_returnflag,\n- t1.l_linestatus,\n- t1.l_shipdate,\n- t1.l_commitdate,\n- t1.l_receiptdate,\n- t1.l_shipinstruct,\n- t1.l_shipmode,\n- t1.l_comment\n- FROM lineitem AS t1\n+ \"t1\".\"l_orderkey\",\n+ \"t1\".\"l_partkey\",\n+ \"t1\".\"l_suppkey\",\n+ \"t1\".\"l_linenumber\",\n+ \"t1\".\"l_quantity\",\n+ \"t1\".\"l_extendedprice\",\n+ \"t1\".\"l_discount\",\n+ 
\"t1\".\"l_tax\",\n+ \"t1\".\"l_returnflag\",\n+ \"t1\".\"l_linestatus\",\n+ \"t1\".\"l_shipdate\",\n+ \"t1\".\"l_commitdate\",\n+ \"t1\".\"l_receiptdate\",\n+ \"t1\".\"l_shipinstruct\",\n+ \"t1\".\"l_shipmode\",\n+ \"t1\".\"l_comment\"\n+ FROM \"lineitem\" AS \"t1\"\n WHERE\n- t1.l_shipdate >= MAKE_DATE(1996, 1, 1) AND t1.l_shipdate < MAKE_DATE(1996, 4, 1)\n- ) AS t3\n+ \"t1\".\"l_shipdate\" >= MAKE_DATE(1996, 1, 1)\n+ AND \"t1\".\"l_shipdate\" < MAKE_DATE(1996, 4, 1)\n+ ) AS \"t3\"\n GROUP BY\n 1\n- ) AS t5\n- ON t2.s_suppkey = t5.l_suppkey\n- ) AS t6\n+ ) AS \"t5\"\n+ ON \"t2\".\"s_suppkey\" = \"t5\".\"l_suppkey\"\n+ ) AS \"t6\"\n )\n ORDER BY\n- t6.s_suppkey ASC\n\\ No newline at end of file\n+ \"t6\".\"s_suppkey\" ASC\n\\ No newline at end of file\n", "h16.sql": "@@ -1,73 +1,73 @@\n SELECT\n- t8.p_brand,\n- t8.p_type,\n- t8.p_size,\n- t8.supplier_cnt\n+ \"t8\".\"p_brand\",\n+ \"t8\".\"p_type\",\n+ \"t8\".\"p_size\",\n+ \"t8\".\"supplier_cnt\"\n FROM (\n SELECT\n- t7.p_brand,\n- t7.p_type,\n- t7.p_size,\n- COUNT(DISTINCT t7.ps_suppkey) AS supplier_cnt\n+ \"t7\".\"p_brand\",\n+ \"t7\".\"p_type\",\n+ \"t7\".\"p_size\",\n+ COUNT(DISTINCT \"t7\".\"ps_suppkey\") AS \"supplier_cnt\"\n FROM (\n SELECT\n- t6.ps_partkey,\n- t6.ps_suppkey,\n- t6.ps_availqty,\n- t6.ps_supplycost,\n- t6.ps_comment,\n- t6.p_partkey,\n- t6.p_name,\n- t6.p_mfgr,\n- t6.p_brand,\n- t6.p_type,\n- t6.p_size,\n- t6.p_container,\n- t6.p_retailprice,\n- t6.p_comment\n+ \"t6\".\"ps_partkey\",\n+ \"t6\".\"ps_suppkey\",\n+ \"t6\".\"ps_availqty\",\n+ \"t6\".\"ps_supplycost\",\n+ \"t6\".\"ps_comment\",\n+ \"t6\".\"p_partkey\",\n+ \"t6\".\"p_name\",\n+ \"t6\".\"p_mfgr\",\n+ \"t6\".\"p_brand\",\n+ \"t6\".\"p_type\",\n+ \"t6\".\"p_size\",\n+ \"t6\".\"p_container\",\n+ \"t6\".\"p_retailprice\",\n+ \"t6\".\"p_comment\"\n FROM (\n SELECT\n- t3.ps_partkey,\n- t3.ps_suppkey,\n- t3.ps_availqty,\n- t3.ps_supplycost,\n- t3.ps_comment,\n- t4.p_partkey,\n- t4.p_name,\n- t4.p_mfgr,\n- t4.p_brand,\n- t4.p_type,\n- t4.p_size,\n- t4.p_container,\n- t4.p_retailprice,\n- t4.p_comment\n- FROM partsupp AS t3\n- INNER JOIN part AS t4\n- ON t4.p_partkey = t3.ps_partkey\n- ) AS t6\n+ \"t3\".\"ps_partkey\",\n+ \"t3\".\"ps_suppkey\",\n+ \"t3\".\"ps_availqty\",\n+ \"t3\".\"ps_supplycost\",\n+ \"t3\".\"ps_comment\",\n+ \"t4\".\"p_partkey\",\n+ \"t4\".\"p_name\",\n+ \"t4\".\"p_mfgr\",\n+ \"t4\".\"p_brand\",\n+ \"t4\".\"p_type\",\n+ \"t4\".\"p_size\",\n+ \"t4\".\"p_container\",\n+ \"t4\".\"p_retailprice\",\n+ \"t4\".\"p_comment\"\n+ FROM \"partsupp\" AS \"t3\"\n+ INNER JOIN \"part\" AS \"t4\"\n+ ON \"t4\".\"p_partkey\" = \"t3\".\"ps_partkey\"\n+ ) AS \"t6\"\n WHERE\n- t6.p_brand <> 'Brand#45'\n+ \"t6\".\"p_brand\" <> 'Brand#45'\n AND NOT (\n- t6.p_type LIKE 'MEDIUM POLISHED%'\n+ \"t6\".\"p_type\" LIKE 'MEDIUM POLISHED%'\n )\n- AND t6.p_size IN (CAST(49 AS TINYINT), CAST(14 AS TINYINT), CAST(23 AS TINYINT), CAST(45 AS TINYINT), CAST(19 AS TINYINT), CAST(3 AS TINYINT), CAST(36 AS TINYINT), CAST(9 AS TINYINT))\n+ AND \"t6\".\"p_size\" IN (CAST(49 AS TINYINT), CAST(14 AS TINYINT), CAST(23 AS TINYINT), CAST(45 AS TINYINT), CAST(19 AS TINYINT), CAST(3 AS TINYINT), CAST(36 AS TINYINT), CAST(9 AS TINYINT))\n AND NOT (\n- t6.ps_suppkey IN (\n+ \"t6\".\"ps_suppkey\" IN (\n SELECT\n- t2.s_suppkey\n- FROM supplier AS t2\n+ \"t2\".\"s_suppkey\"\n+ FROM \"supplier\" AS \"t2\"\n WHERE\n- t2.s_comment LIKE '%Customer%Complaints%'\n+ \"t2\".\"s_comment\" LIKE '%Customer%Complaints%'\n )\n )\n- ) AS t7\n+ ) AS \"t7\"\n GROUP BY\n 1,\n 2,\n 3\n-) AS t8\n+) AS \"t8\"\n ORDER 
BY\n- t8.supplier_cnt DESC,\n- t8.p_brand ASC,\n- t8.p_type ASC,\n- t8.p_size ASC\n\\ No newline at end of file\n+ \"t8\".\"supplier_cnt\" DESC,\n+ \"t8\".\"p_brand\" ASC,\n+ \"t8\".\"p_type\" ASC,\n+ \"t8\".\"p_size\" ASC\n\\ No newline at end of file\n", "h17.sql": "@@ -1,92 +1,92 @@\n SELECT\n- SUM(t7.l_extendedprice) / CAST(7.0 AS DOUBLE) AS avg_yearly\n+ SUM(\"t7\".\"l_extendedprice\") / CAST(7.0 AS DOUBLE) AS \"avg_yearly\"\n FROM (\n SELECT\n- t4.l_orderkey,\n- t4.l_partkey,\n- t4.l_suppkey,\n- t4.l_linenumber,\n- t4.l_quantity,\n- t4.l_extendedprice,\n- t4.l_discount,\n- t4.l_tax,\n- t4.l_returnflag,\n- t4.l_linestatus,\n- t4.l_shipdate,\n- t4.l_commitdate,\n- t4.l_receiptdate,\n- t4.l_shipinstruct,\n- t4.l_shipmode,\n- t4.l_comment,\n- t4.p_partkey,\n- t4.p_name,\n- t4.p_mfgr,\n- t4.p_brand,\n- t4.p_type,\n- t4.p_size,\n- t4.p_container,\n- t4.p_retailprice,\n- t4.p_comment\n+ \"t4\".\"l_orderkey\",\n+ \"t4\".\"l_partkey\",\n+ \"t4\".\"l_suppkey\",\n+ \"t4\".\"l_linenumber\",\n+ \"t4\".\"l_quantity\",\n+ \"t4\".\"l_extendedprice\",\n+ \"t4\".\"l_discount\",\n+ \"t4\".\"l_tax\",\n+ \"t4\".\"l_returnflag\",\n+ \"t4\".\"l_linestatus\",\n+ \"t4\".\"l_shipdate\",\n+ \"t4\".\"l_commitdate\",\n+ \"t4\".\"l_receiptdate\",\n+ \"t4\".\"l_shipinstruct\",\n+ \"t4\".\"l_shipmode\",\n+ \"t4\".\"l_comment\",\n+ \"t4\".\"p_partkey\",\n+ \"t4\".\"p_name\",\n+ \"t4\".\"p_mfgr\",\n+ \"t4\".\"p_brand\",\n+ \"t4\".\"p_type\",\n+ \"t4\".\"p_size\",\n+ \"t4\".\"p_container\",\n+ \"t4\".\"p_retailprice\",\n+ \"t4\".\"p_comment\"\n FROM (\n SELECT\n- t2.l_orderkey,\n- t2.l_partkey,\n- t2.l_suppkey,\n- t2.l_linenumber,\n- t2.l_quantity,\n- t2.l_extendedprice,\n- t2.l_discount,\n- t2.l_tax,\n- t2.l_returnflag,\n- t2.l_linestatus,\n- t2.l_shipdate,\n- t2.l_commitdate,\n- t2.l_receiptdate,\n- t2.l_shipinstruct,\n- t2.l_shipmode,\n- t2.l_comment,\n- t3.p_partkey,\n- t3.p_name,\n- t3.p_mfgr,\n- t3.p_brand,\n- t3.p_type,\n- t3.p_size,\n- t3.p_container,\n- t3.p_retailprice,\n- t3.p_comment\n- FROM lineitem AS t2\n- INNER JOIN part AS t3\n- ON t3.p_partkey = t2.l_partkey\n- ) AS t4\n+ \"t2\".\"l_orderkey\",\n+ \"t2\".\"l_partkey\",\n+ \"t2\".\"l_suppkey\",\n+ \"t2\".\"l_linenumber\",\n+ \"t2\".\"l_quantity\",\n+ \"t2\".\"l_extendedprice\",\n+ \"t2\".\"l_discount\",\n+ \"t2\".\"l_tax\",\n+ \"t2\".\"l_returnflag\",\n+ \"t2\".\"l_linestatus\",\n+ \"t2\".\"l_shipdate\",\n+ \"t2\".\"l_commitdate\",\n+ \"t2\".\"l_receiptdate\",\n+ \"t2\".\"l_shipinstruct\",\n+ \"t2\".\"l_shipmode\",\n+ \"t2\".\"l_comment\",\n+ \"t3\".\"p_partkey\",\n+ \"t3\".\"p_name\",\n+ \"t3\".\"p_mfgr\",\n+ \"t3\".\"p_brand\",\n+ \"t3\".\"p_type\",\n+ \"t3\".\"p_size\",\n+ \"t3\".\"p_container\",\n+ \"t3\".\"p_retailprice\",\n+ \"t3\".\"p_comment\"\n+ FROM \"lineitem\" AS \"t2\"\n+ INNER JOIN \"part\" AS \"t3\"\n+ ON \"t3\".\"p_partkey\" = \"t2\".\"l_partkey\"\n+ ) AS \"t4\"\n WHERE\n- t4.p_brand = 'Brand#23'\n- AND t4.p_container = 'MED BOX'\n- AND t4.l_quantity < (\n+ \"t4\".\"p_brand\" = 'Brand#23'\n+ AND \"t4\".\"p_container\" = 'MED BOX'\n+ AND \"t4\".\"l_quantity\" < (\n (\n SELECT\n- AVG(t5.l_quantity) AS \"Mean(l_quantity)\"\n+ AVG(\"t5\".\"l_quantity\") AS \"Mean(l_quantity)\"\n FROM (\n SELECT\n- t0.l_orderkey,\n- t0.l_partkey,\n- t0.l_suppkey,\n- t0.l_linenumber,\n- t0.l_quantity,\n- t0.l_extendedprice,\n- t0.l_discount,\n- t0.l_tax,\n- t0.l_returnflag,\n- t0.l_linestatus,\n- t0.l_shipdate,\n- t0.l_commitdate,\n- t0.l_receiptdate,\n- t0.l_shipinstruct,\n- t0.l_shipmode,\n- t0.l_comment\n- FROM lineitem AS t0\n+ \"t0\".\"l_orderkey\",\n+ 
\"t0\".\"l_partkey\",\n+ \"t0\".\"l_suppkey\",\n+ \"t0\".\"l_linenumber\",\n+ \"t0\".\"l_quantity\",\n+ \"t0\".\"l_extendedprice\",\n+ \"t0\".\"l_discount\",\n+ \"t0\".\"l_tax\",\n+ \"t0\".\"l_returnflag\",\n+ \"t0\".\"l_linestatus\",\n+ \"t0\".\"l_shipdate\",\n+ \"t0\".\"l_commitdate\",\n+ \"t0\".\"l_receiptdate\",\n+ \"t0\".\"l_shipinstruct\",\n+ \"t0\".\"l_shipmode\",\n+ \"t0\".\"l_comment\"\n+ FROM \"lineitem\" AS \"t0\"\n WHERE\n- t0.l_partkey = t4.p_partkey\n- ) AS t5\n+ \"t0\".\"l_partkey\" = \"t4\".\"p_partkey\"\n+ ) AS \"t5\"\n ) * CAST(0.2 AS DOUBLE)\n )\n-) AS t7\n\\ No newline at end of file\n+) AS \"t7\"\n\\ No newline at end of file\n", "h18.sql": "@@ -1,118 +1,118 @@\n SELECT\n- t10.c_name,\n- t10.c_custkey,\n- t10.o_orderkey,\n- t10.o_orderdate,\n- t10.o_totalprice,\n- t10.sum_qty\n+ \"t10\".\"c_name\",\n+ \"t10\".\"c_custkey\",\n+ \"t10\".\"o_orderkey\",\n+ \"t10\".\"o_orderdate\",\n+ \"t10\".\"o_totalprice\",\n+ \"t10\".\"sum_qty\"\n FROM (\n SELECT\n- t9.c_name,\n- t9.c_custkey,\n- t9.o_orderkey,\n- t9.o_orderdate,\n- t9.o_totalprice,\n- SUM(t9.l_quantity) AS sum_qty\n+ \"t9\".\"c_name\",\n+ \"t9\".\"c_custkey\",\n+ \"t9\".\"o_orderkey\",\n+ \"t9\".\"o_orderdate\",\n+ \"t9\".\"o_totalprice\",\n+ SUM(\"t9\".\"l_quantity\") AS \"sum_qty\"\n FROM (\n SELECT\n- t7.c_custkey,\n- t7.c_name,\n- t7.c_address,\n- t7.c_nationkey,\n- t7.c_phone,\n- t7.c_acctbal,\n- t7.c_mktsegment,\n- t7.c_comment,\n- t7.o_orderkey,\n- t7.o_custkey,\n- t7.o_orderstatus,\n- t7.o_totalprice,\n- t7.o_orderdate,\n- t7.o_orderpriority,\n- t7.o_clerk,\n- t7.o_shippriority,\n- t7.o_comment,\n- t7.l_orderkey,\n- t7.l_partkey,\n- t7.l_suppkey,\n- t7.l_linenumber,\n- t7.l_quantity,\n- t7.l_extendedprice,\n- t7.l_discount,\n- t7.l_tax,\n- t7.l_returnflag,\n- t7.l_linestatus,\n- t7.l_shipdate,\n- t7.l_commitdate,\n- t7.l_receiptdate,\n- t7.l_shipinstruct,\n- t7.l_shipmode,\n- t7.l_comment\n+ \"t7\".\"c_custkey\",\n+ \"t7\".\"c_name\",\n+ \"t7\".\"c_address\",\n+ \"t7\".\"c_nationkey\",\n+ \"t7\".\"c_phone\",\n+ \"t7\".\"c_acctbal\",\n+ \"t7\".\"c_mktsegment\",\n+ \"t7\".\"c_comment\",\n+ \"t7\".\"o_orderkey\",\n+ \"t7\".\"o_custkey\",\n+ \"t7\".\"o_orderstatus\",\n+ \"t7\".\"o_totalprice\",\n+ \"t7\".\"o_orderdate\",\n+ \"t7\".\"o_orderpriority\",\n+ \"t7\".\"o_clerk\",\n+ \"t7\".\"o_shippriority\",\n+ \"t7\".\"o_comment\",\n+ \"t7\".\"l_orderkey\",\n+ \"t7\".\"l_partkey\",\n+ \"t7\".\"l_suppkey\",\n+ \"t7\".\"l_linenumber\",\n+ \"t7\".\"l_quantity\",\n+ \"t7\".\"l_extendedprice\",\n+ \"t7\".\"l_discount\",\n+ \"t7\".\"l_tax\",\n+ \"t7\".\"l_returnflag\",\n+ \"t7\".\"l_linestatus\",\n+ \"t7\".\"l_shipdate\",\n+ \"t7\".\"l_commitdate\",\n+ \"t7\".\"l_receiptdate\",\n+ \"t7\".\"l_shipinstruct\",\n+ \"t7\".\"l_shipmode\",\n+ \"t7\".\"l_comment\"\n FROM (\n SELECT\n- t3.c_custkey,\n- t3.c_name,\n- t3.c_address,\n- t3.c_nationkey,\n- t3.c_phone,\n- t3.c_acctbal,\n- t3.c_mktsegment,\n- t3.c_comment,\n- t4.o_orderkey,\n- t4.o_custkey,\n- t4.o_orderstatus,\n- t4.o_totalprice,\n- t4.o_orderdate,\n- t4.o_orderpriority,\n- t4.o_clerk,\n- t4.o_shippriority,\n- t4.o_comment,\n- t5.l_orderkey,\n- t5.l_partkey,\n- t5.l_suppkey,\n- t5.l_linenumber,\n- t5.l_quantity,\n- t5.l_extendedprice,\n- t5.l_discount,\n- t5.l_tax,\n- t5.l_returnflag,\n- t5.l_linestatus,\n- t5.l_shipdate,\n- t5.l_commitdate,\n- t5.l_receiptdate,\n- t5.l_shipinstruct,\n- t5.l_shipmode,\n- t5.l_comment\n- FROM customer AS t3\n- INNER JOIN orders AS t4\n- ON t3.c_custkey = t4.o_custkey\n- INNER JOIN lineitem AS t5\n- ON t4.o_orderkey = t5.l_orderkey\n- ) AS 
t7\n+ \"t3\".\"c_custkey\",\n+ \"t3\".\"c_name\",\n+ \"t3\".\"c_address\",\n+ \"t3\".\"c_nationkey\",\n+ \"t3\".\"c_phone\",\n+ \"t3\".\"c_acctbal\",\n+ \"t3\".\"c_mktsegment\",\n+ \"t3\".\"c_comment\",\n+ \"t4\".\"o_orderkey\",\n+ \"t4\".\"o_custkey\",\n+ \"t4\".\"o_orderstatus\",\n+ \"t4\".\"o_totalprice\",\n+ \"t4\".\"o_orderdate\",\n+ \"t4\".\"o_orderpriority\",\n+ \"t4\".\"o_clerk\",\n+ \"t4\".\"o_shippriority\",\n+ \"t4\".\"o_comment\",\n+ \"t5\".\"l_orderkey\",\n+ \"t5\".\"l_partkey\",\n+ \"t5\".\"l_suppkey\",\n+ \"t5\".\"l_linenumber\",\n+ \"t5\".\"l_quantity\",\n+ \"t5\".\"l_extendedprice\",\n+ \"t5\".\"l_discount\",\n+ \"t5\".\"l_tax\",\n+ \"t5\".\"l_returnflag\",\n+ \"t5\".\"l_linestatus\",\n+ \"t5\".\"l_shipdate\",\n+ \"t5\".\"l_commitdate\",\n+ \"t5\".\"l_receiptdate\",\n+ \"t5\".\"l_shipinstruct\",\n+ \"t5\".\"l_shipmode\",\n+ \"t5\".\"l_comment\"\n+ FROM \"customer\" AS \"t3\"\n+ INNER JOIN \"orders\" AS \"t4\"\n+ ON \"t3\".\"c_custkey\" = \"t4\".\"o_custkey\"\n+ INNER JOIN \"lineitem\" AS \"t5\"\n+ ON \"t4\".\"o_orderkey\" = \"t5\".\"l_orderkey\"\n+ ) AS \"t7\"\n WHERE\n- t7.o_orderkey IN (\n+ \"t7\".\"o_orderkey\" IN (\n SELECT\n- t6.l_orderkey\n+ \"t6\".\"l_orderkey\"\n FROM (\n SELECT\n- t2.l_orderkey,\n- SUM(t2.l_quantity) AS qty_sum\n- FROM lineitem AS t2\n+ \"t2\".\"l_orderkey\",\n+ SUM(\"t2\".\"l_quantity\") AS \"qty_sum\"\n+ FROM \"lineitem\" AS \"t2\"\n GROUP BY\n 1\n- ) AS t6\n+ ) AS \"t6\"\n WHERE\n- t6.qty_sum > CAST(300 AS SMALLINT)\n+ \"t6\".\"qty_sum\" > CAST(300 AS SMALLINT)\n )\n- ) AS t9\n+ ) AS \"t9\"\n GROUP BY\n 1,\n 2,\n 3,\n 4,\n 5\n-) AS t10\n+) AS \"t10\"\n ORDER BY\n- t10.o_totalprice DESC,\n- t10.o_orderdate ASC\n+ \"t10\".\"o_totalprice\" DESC,\n+ \"t10\".\"o_orderdate\" ASC\n LIMIT 100\n\\ No newline at end of file\n", "h19.sql": "@@ -1,65 +1,65 @@\n SELECT\n- SUM(t5.l_extendedprice * (\n- CAST(1 AS TINYINT) - t5.l_discount\n- )) AS revenue\n+ SUM(\"t5\".\"l_extendedprice\" * (\n+ CAST(1 AS TINYINT) - \"t5\".\"l_discount\"\n+ )) AS \"revenue\"\n FROM (\n SELECT\n- t4.l_orderkey,\n- t4.l_partkey,\n- t4.l_suppkey,\n- t4.l_linenumber,\n- t4.l_quantity,\n- t4.l_extendedprice,\n- t4.l_discount,\n- t4.l_tax,\n- t4.l_returnflag,\n- t4.l_linestatus,\n- t4.l_shipdate,\n- t4.l_commitdate,\n- t4.l_receiptdate,\n- t4.l_shipinstruct,\n- t4.l_shipmode,\n- t4.l_comment,\n- t4.p_partkey,\n- t4.p_name,\n- t4.p_mfgr,\n- t4.p_brand,\n- t4.p_type,\n- t4.p_size,\n- t4.p_container,\n- t4.p_retailprice,\n- t4.p_comment\n+ \"t4\".\"l_orderkey\",\n+ \"t4\".\"l_partkey\",\n+ \"t4\".\"l_suppkey\",\n+ \"t4\".\"l_linenumber\",\n+ \"t4\".\"l_quantity\",\n+ \"t4\".\"l_extendedprice\",\n+ \"t4\".\"l_discount\",\n+ \"t4\".\"l_tax\",\n+ \"t4\".\"l_returnflag\",\n+ \"t4\".\"l_linestatus\",\n+ \"t4\".\"l_shipdate\",\n+ \"t4\".\"l_commitdate\",\n+ \"t4\".\"l_receiptdate\",\n+ \"t4\".\"l_shipinstruct\",\n+ \"t4\".\"l_shipmode\",\n+ \"t4\".\"l_comment\",\n+ \"t4\".\"p_partkey\",\n+ \"t4\".\"p_name\",\n+ \"t4\".\"p_mfgr\",\n+ \"t4\".\"p_brand\",\n+ \"t4\".\"p_type\",\n+ \"t4\".\"p_size\",\n+ \"t4\".\"p_container\",\n+ \"t4\".\"p_retailprice\",\n+ \"t4\".\"p_comment\"\n FROM (\n SELECT\n- t2.l_orderkey,\n- t2.l_partkey,\n- t2.l_suppkey,\n- t2.l_linenumber,\n- t2.l_quantity,\n- t2.l_extendedprice,\n- t2.l_discount,\n- t2.l_tax,\n- t2.l_returnflag,\n- t2.l_linestatus,\n- t2.l_shipdate,\n- t2.l_commitdate,\n- t2.l_receiptdate,\n- t2.l_shipinstruct,\n- t2.l_shipmode,\n- t2.l_comment,\n- t3.p_partkey,\n- t3.p_name,\n- t3.p_mfgr,\n- t3.p_brand,\n- t3.p_type,\n- t3.p_size,\n- 
t3.p_container,\n- t3.p_retailprice,\n- t3.p_comment\n- FROM lineitem AS t2\n- INNER JOIN part AS t3\n- ON t3.p_partkey = t2.l_partkey\n- ) AS t4\n+ \"t2\".\"l_orderkey\",\n+ \"t2\".\"l_partkey\",\n+ \"t2\".\"l_suppkey\",\n+ \"t2\".\"l_linenumber\",\n+ \"t2\".\"l_quantity\",\n+ \"t2\".\"l_extendedprice\",\n+ \"t2\".\"l_discount\",\n+ \"t2\".\"l_tax\",\n+ \"t2\".\"l_returnflag\",\n+ \"t2\".\"l_linestatus\",\n+ \"t2\".\"l_shipdate\",\n+ \"t2\".\"l_commitdate\",\n+ \"t2\".\"l_receiptdate\",\n+ \"t2\".\"l_shipinstruct\",\n+ \"t2\".\"l_shipmode\",\n+ \"t2\".\"l_comment\",\n+ \"t3\".\"p_partkey\",\n+ \"t3\".\"p_name\",\n+ \"t3\".\"p_mfgr\",\n+ \"t3\".\"p_brand\",\n+ \"t3\".\"p_type\",\n+ \"t3\".\"p_size\",\n+ \"t3\".\"p_container\",\n+ \"t3\".\"p_retailprice\",\n+ \"t3\".\"p_comment\"\n+ FROM \"lineitem\" AS \"t2\"\n+ INNER JOIN \"part\" AS \"t3\"\n+ ON \"t3\".\"p_partkey\" = \"t2\".\"l_partkey\"\n+ ) AS \"t4\"\n WHERE\n (\n (\n@@ -69,24 +69,24 @@ FROM (\n (\n (\n (\n- t4.p_brand = 'Brand#12'\n+ \"t4\".\"p_brand\" = 'Brand#12'\n )\n- AND t4.p_container IN ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')\n+ AND \"t4\".\"p_container\" IN ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')\n )\n AND (\n- t4.l_quantity >= CAST(1 AS TINYINT)\n+ \"t4\".\"l_quantity\" >= CAST(1 AS TINYINT)\n )\n )\n AND (\n- t4.l_quantity <= CAST(11 AS TINYINT)\n+ \"t4\".\"l_quantity\" <= CAST(11 AS TINYINT)\n )\n )\n- AND t4.p_size BETWEEN CAST(1 AS TINYINT) AND CAST(5 AS TINYINT)\n+ AND \"t4\".\"p_size\" BETWEEN CAST(1 AS TINYINT) AND CAST(5 AS TINYINT)\n )\n- AND t4.l_shipmode IN ('AIR', 'AIR REG')\n+ AND \"t4\".\"l_shipmode\" IN ('AIR', 'AIR REG')\n )\n AND (\n- t4.l_shipinstruct = 'DELIVER IN PERSON'\n+ \"t4\".\"l_shipinstruct\" = 'DELIVER IN PERSON'\n )\n )\n OR (\n@@ -96,24 +96,24 @@ FROM (\n (\n (\n (\n- t4.p_brand = 'Brand#23'\n+ \"t4\".\"p_brand\" = 'Brand#23'\n )\n- AND t4.p_container IN ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')\n+ AND \"t4\".\"p_container\" IN ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')\n )\n AND (\n- t4.l_quantity >= CAST(10 AS TINYINT)\n+ \"t4\".\"l_quantity\" >= CAST(10 AS TINYINT)\n )\n )\n AND (\n- t4.l_quantity <= CAST(20 AS TINYINT)\n+ \"t4\".\"l_quantity\" <= CAST(20 AS TINYINT)\n )\n )\n- AND t4.p_size BETWEEN CAST(1 AS TINYINT) AND CAST(10 AS TINYINT)\n+ AND \"t4\".\"p_size\" BETWEEN CAST(1 AS TINYINT) AND CAST(10 AS TINYINT)\n )\n- AND t4.l_shipmode IN ('AIR', 'AIR REG')\n+ AND \"t4\".\"l_shipmode\" IN ('AIR', 'AIR REG')\n )\n AND (\n- t4.l_shipinstruct = 'DELIVER IN PERSON'\n+ \"t4\".\"l_shipinstruct\" = 'DELIVER IN PERSON'\n )\n )\n )\n@@ -124,24 +124,24 @@ FROM (\n (\n (\n (\n- t4.p_brand = 'Brand#34'\n+ \"t4\".\"p_brand\" = 'Brand#34'\n )\n- AND t4.p_container IN ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')\n+ AND \"t4\".\"p_container\" IN ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')\n )\n AND (\n- t4.l_quantity >= CAST(20 AS TINYINT)\n+ \"t4\".\"l_quantity\" >= CAST(20 AS TINYINT)\n )\n )\n AND (\n- t4.l_quantity <= CAST(30 AS TINYINT)\n+ \"t4\".\"l_quantity\" <= CAST(30 AS TINYINT)\n )\n )\n- AND t4.p_size BETWEEN CAST(1 AS TINYINT) AND CAST(15 AS TINYINT)\n+ AND \"t4\".\"p_size\" BETWEEN CAST(1 AS TINYINT) AND CAST(15 AS TINYINT)\n )\n- AND t4.l_shipmode IN ('AIR', 'AIR REG')\n+ AND \"t4\".\"l_shipmode\" IN ('AIR', 'AIR REG')\n )\n AND (\n- t4.l_shipinstruct = 'DELIVER IN PERSON'\n+ \"t4\".\"l_shipinstruct\" = 'DELIVER IN PERSON'\n )\n )\n-) AS t5\n\\ No newline at end of file\n+) AS \"t5\"\n\\ No newline at end of file\n", "h20.sql": "@@ -1,68 +1,68 @@\n SELECT\n- t9.s_name,\n- 
t9.s_address\n+ \"t9\".\"s_name\",\n+ \"t9\".\"s_address\"\n FROM (\n SELECT\n- t5.s_suppkey,\n- t5.s_name,\n- t5.s_address,\n- t5.s_nationkey,\n- t5.s_phone,\n- t5.s_acctbal,\n- t5.s_comment,\n- t6.n_nationkey,\n- t6.n_name,\n- t6.n_regionkey,\n- t6.n_comment\n- FROM supplier AS t5\n- INNER JOIN nation AS t6\n- ON t5.s_nationkey = t6.n_nationkey\n-) AS t9\n+ \"t5\".\"s_suppkey\",\n+ \"t5\".\"s_name\",\n+ \"t5\".\"s_address\",\n+ \"t5\".\"s_nationkey\",\n+ \"t5\".\"s_phone\",\n+ \"t5\".\"s_acctbal\",\n+ \"t5\".\"s_comment\",\n+ \"t6\".\"n_nationkey\",\n+ \"t6\".\"n_name\",\n+ \"t6\".\"n_regionkey\",\n+ \"t6\".\"n_comment\"\n+ FROM \"supplier\" AS \"t5\"\n+ INNER JOIN \"nation\" AS \"t6\"\n+ ON \"t5\".\"s_nationkey\" = \"t6\".\"n_nationkey\"\n+) AS \"t9\"\n WHERE\n- t9.n_name = 'CANADA'\n- AND t9.s_suppkey IN (\n+ \"t9\".\"n_name\" = 'CANADA'\n+ AND \"t9\".\"s_suppkey\" IN (\n SELECT\n- t1.ps_suppkey\n- FROM partsupp AS t1\n+ \"t1\".\"ps_suppkey\"\n+ FROM \"partsupp\" AS \"t1\"\n WHERE\n- t1.ps_partkey IN (\n+ \"t1\".\"ps_partkey\" IN (\n SELECT\n- t3.p_partkey\n- FROM part AS t3\n+ \"t3\".\"p_partkey\"\n+ FROM \"part\" AS \"t3\"\n WHERE\n- t3.p_name LIKE 'forest%'\n+ \"t3\".\"p_name\" LIKE 'forest%'\n )\n- AND t1.ps_availqty > (\n+ AND \"t1\".\"ps_availqty\" > (\n (\n SELECT\n- SUM(t8.l_quantity) AS \"Sum(l_quantity)\"\n+ SUM(\"t8\".\"l_quantity\") AS \"Sum(l_quantity)\"\n FROM (\n SELECT\n- t4.l_orderkey,\n- t4.l_partkey,\n- t4.l_suppkey,\n- t4.l_linenumber,\n- t4.l_quantity,\n- t4.l_extendedprice,\n- t4.l_discount,\n- t4.l_tax,\n- t4.l_returnflag,\n- t4.l_linestatus,\n- t4.l_shipdate,\n- t4.l_commitdate,\n- t4.l_receiptdate,\n- t4.l_shipinstruct,\n- t4.l_shipmode,\n- t4.l_comment\n- FROM lineitem AS t4\n+ \"t4\".\"l_orderkey\",\n+ \"t4\".\"l_partkey\",\n+ \"t4\".\"l_suppkey\",\n+ \"t4\".\"l_linenumber\",\n+ \"t4\".\"l_quantity\",\n+ \"t4\".\"l_extendedprice\",\n+ \"t4\".\"l_discount\",\n+ \"t4\".\"l_tax\",\n+ \"t4\".\"l_returnflag\",\n+ \"t4\".\"l_linestatus\",\n+ \"t4\".\"l_shipdate\",\n+ \"t4\".\"l_commitdate\",\n+ \"t4\".\"l_receiptdate\",\n+ \"t4\".\"l_shipinstruct\",\n+ \"t4\".\"l_shipmode\",\n+ \"t4\".\"l_comment\"\n+ FROM \"lineitem\" AS \"t4\"\n WHERE\n- t4.l_partkey = t1.ps_partkey\n- AND t4.l_suppkey = t1.ps_suppkey\n- AND t4.l_shipdate >= MAKE_DATE(1994, 1, 1)\n- AND t4.l_shipdate < MAKE_DATE(1995, 1, 1)\n- ) AS t8\n+ \"t4\".\"l_partkey\" = \"t1\".\"ps_partkey\"\n+ AND \"t4\".\"l_suppkey\" = \"t1\".\"ps_suppkey\"\n+ AND \"t4\".\"l_shipdate\" >= MAKE_DATE(1994, 1, 1)\n+ AND \"t4\".\"l_shipdate\" < MAKE_DATE(1995, 1, 1)\n+ ) AS \"t8\"\n ) * CAST(0.5 AS DOUBLE)\n )\n )\n ORDER BY\n- t9.s_name ASC\n\\ No newline at end of file\n+ \"t9\".\"s_name\" ASC\n\\ No newline at end of file\n", "h21.sql": "@@ -1,74 +1,76 @@\n SELECT\n- t14.s_name,\n- t14.numwait\n+ \"t14\".\"s_name\",\n+ \"t14\".\"numwait\"\n FROM (\n SELECT\n- t13.s_name,\n- COUNT(*) AS numwait\n+ \"t13\".\"s_name\",\n+ COUNT(*) AS \"numwait\"\n FROM (\n SELECT\n- t10.l1_orderkey,\n- t10.o_orderstatus,\n- t10.l_receiptdate,\n- t10.l_commitdate,\n- t10.l1_suppkey,\n- t10.s_name,\n- t10.n_name\n+ \"t10\".\"l1_orderkey\",\n+ \"t10\".\"o_orderstatus\",\n+ \"t10\".\"l_receiptdate\",\n+ \"t10\".\"l_commitdate\",\n+ \"t10\".\"l1_suppkey\",\n+ \"t10\".\"s_name\",\n+ \"t10\".\"n_name\"\n FROM (\n SELECT\n- t5.l_orderkey AS l1_orderkey,\n- t8.o_orderstatus,\n- t5.l_receiptdate,\n- t5.l_commitdate,\n- t5.l_suppkey AS l1_suppkey,\n- t4.s_name,\n- t9.n_name\n- FROM supplier AS t4\n- INNER JOIN lineitem AS t5\n- ON t4.s_suppkey = 
t5.l_suppkey\n- INNER JOIN orders AS t8\n- ON t8.o_orderkey = t5.l_orderkey\n- INNER JOIN nation AS t9\n- ON t4.s_nationkey = t9.n_nationkey\n- ) AS t10\n+ \"t5\".\"l_orderkey\" AS \"l1_orderkey\",\n+ \"t8\".\"o_orderstatus\",\n+ \"t5\".\"l_receiptdate\",\n+ \"t5\".\"l_commitdate\",\n+ \"t5\".\"l_suppkey\" AS \"l1_suppkey\",\n+ \"t4\".\"s_name\",\n+ \"t9\".\"n_name\"\n+ FROM \"supplier\" AS \"t4\"\n+ INNER JOIN \"lineitem\" AS \"t5\"\n+ ON \"t4\".\"s_suppkey\" = \"t5\".\"l_suppkey\"\n+ INNER JOIN \"orders\" AS \"t8\"\n+ ON \"t8\".\"o_orderkey\" = \"t5\".\"l_orderkey\"\n+ INNER JOIN \"nation\" AS \"t9\"\n+ ON \"t4\".\"s_nationkey\" = \"t9\".\"n_nationkey\"\n+ ) AS \"t10\"\n WHERE\n- t10.o_orderstatus = 'F'\n- AND t10.l_receiptdate > t10.l_commitdate\n- AND t10.n_name = 'SAUDI ARABIA'\n+ \"t10\".\"o_orderstatus\" = 'F'\n+ AND \"t10\".\"l_receiptdate\" > \"t10\".\"l_commitdate\"\n+ AND \"t10\".\"n_name\" = 'SAUDI ARABIA'\n AND EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM lineitem AS t6\n+ FROM \"lineitem\" AS \"t6\"\n WHERE\n (\n- t6.l_orderkey = t10.l1_orderkey\n- ) AND (\n- t6.l_suppkey <> t10.l1_suppkey\n+ \"t6\".\"l_orderkey\" = \"t10\".\"l1_orderkey\"\n+ )\n+ AND (\n+ \"t6\".\"l_suppkey\" <> \"t10\".\"l1_suppkey\"\n )\n )\n AND NOT (\n EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM lineitem AS t7\n+ FROM \"lineitem\" AS \"t7\"\n WHERE\n (\n (\n- t7.l_orderkey = t10.l1_orderkey\n- ) AND (\n- t7.l_suppkey <> t10.l1_suppkey\n+ \"t7\".\"l_orderkey\" = \"t10\".\"l1_orderkey\"\n+ )\n+ AND (\n+ \"t7\".\"l_suppkey\" <> \"t10\".\"l1_suppkey\"\n )\n )\n AND (\n- t7.l_receiptdate > t7.l_commitdate\n+ \"t7\".\"l_receiptdate\" > \"t7\".\"l_commitdate\"\n )\n )\n )\n- ) AS t13\n+ ) AS \"t13\"\n GROUP BY\n 1\n-) AS t14\n+) AS \"t14\"\n ORDER BY\n- t14.numwait DESC,\n- t14.s_name ASC\n+ \"t14\".\"numwait\" DESC,\n+ \"t14\".\"s_name\" ASC\n LIMIT 100\n\\ No newline at end of file\n", "h22.sql": "@@ -1,68 +1,68 @@\n SELECT\n- t6.cntrycode,\n- t6.numcust,\n- t6.totacctbal\n+ \"t6\".\"cntrycode\",\n+ \"t6\".\"numcust\",\n+ \"t6\".\"totacctbal\"\n FROM (\n SELECT\n- t5.cntrycode,\n- COUNT(*) AS numcust,\n- SUM(t5.c_acctbal) AS totacctbal\n+ \"t5\".\"cntrycode\",\n+ COUNT(*) AS \"numcust\",\n+ SUM(\"t5\".\"c_acctbal\") AS \"totacctbal\"\n FROM (\n SELECT\n CASE\n WHEN (\n CAST(0 AS TINYINT) + 1\n ) >= 1\n- THEN SUBSTRING(t0.c_phone, CAST(0 AS TINYINT) + 1, CAST(2 AS TINYINT))\n- ELSE SUBSTRING(t0.c_phone, CAST(0 AS TINYINT) + 1 + LENGTH(t0.c_phone), CAST(2 AS TINYINT))\n- END AS cntrycode,\n- t0.c_acctbal\n- FROM customer AS t0\n+ THEN SUBSTRING(\"t0\".\"c_phone\", CAST(0 AS TINYINT) + 1, CAST(2 AS TINYINT))\n+ ELSE SUBSTRING(\"t0\".\"c_phone\", CAST(0 AS TINYINT) + 1 + LENGTH(\"t0\".\"c_phone\"), CAST(2 AS TINYINT))\n+ END AS \"cntrycode\",\n+ \"t0\".\"c_acctbal\"\n+ FROM \"customer\" AS \"t0\"\n WHERE\n CASE\n WHEN (\n CAST(0 AS TINYINT) + 1\n ) >= 1\n- THEN SUBSTRING(t0.c_phone, CAST(0 AS TINYINT) + 1, CAST(2 AS TINYINT))\n- ELSE SUBSTRING(t0.c_phone, CAST(0 AS TINYINT) + 1 + LENGTH(t0.c_phone), CAST(2 AS TINYINT))\n+ THEN SUBSTRING(\"t0\".\"c_phone\", CAST(0 AS TINYINT) + 1, CAST(2 AS TINYINT))\n+ ELSE SUBSTRING(\"t0\".\"c_phone\", CAST(0 AS TINYINT) + 1 + LENGTH(\"t0\".\"c_phone\"), CAST(2 AS TINYINT))\n END IN ('13', '31', '23', '29', '30', '18', '17')\n- AND t0.c_acctbal > (\n+ AND \"t0\".\"c_acctbal\" > (\n SELECT\n- AVG(t3.c_acctbal) AS \"Mean(c_acctbal)\"\n+ AVG(\"t3\".\"c_acctbal\") AS \"Mean(c_acctbal)\"\n FROM (\n SELECT\n- t0.c_custkey,\n- t0.c_name,\n- t0.c_address,\n- 
t0.c_nationkey,\n- t0.c_phone,\n- t0.c_acctbal,\n- t0.c_mktsegment,\n- t0.c_comment\n- FROM customer AS t0\n+ \"t0\".\"c_custkey\",\n+ \"t0\".\"c_name\",\n+ \"t0\".\"c_address\",\n+ \"t0\".\"c_nationkey\",\n+ \"t0\".\"c_phone\",\n+ \"t0\".\"c_acctbal\",\n+ \"t0\".\"c_mktsegment\",\n+ \"t0\".\"c_comment\"\n+ FROM \"customer\" AS \"t0\"\n WHERE\n- t0.c_acctbal > CAST(0.0 AS DOUBLE)\n+ \"t0\".\"c_acctbal\" > CAST(0.0 AS DOUBLE)\n AND CASE\n WHEN (\n CAST(0 AS TINYINT) + 1\n ) >= 1\n- THEN SUBSTRING(t0.c_phone, CAST(0 AS TINYINT) + 1, CAST(2 AS TINYINT))\n- ELSE SUBSTRING(t0.c_phone, CAST(0 AS TINYINT) + 1 + LENGTH(t0.c_phone), CAST(2 AS TINYINT))\n+ THEN SUBSTRING(\"t0\".\"c_phone\", CAST(0 AS TINYINT) + 1, CAST(2 AS TINYINT))\n+ ELSE SUBSTRING(\"t0\".\"c_phone\", CAST(0 AS TINYINT) + 1 + LENGTH(\"t0\".\"c_phone\"), CAST(2 AS TINYINT))\n END IN ('13', '31', '23', '29', '30', '18', '17')\n- ) AS t3\n+ ) AS \"t3\"\n )\n AND NOT (\n EXISTS(\n SELECT\n CAST(1 AS TINYINT) AS \"1\"\n- FROM orders AS t1\n+ FROM \"orders\" AS \"t1\"\n WHERE\n- t1.o_custkey = t0.c_custkey\n+ \"t1\".\"o_custkey\" = \"t0\".\"c_custkey\"\n )\n )\n- ) AS t5\n+ ) AS \"t5\"\n GROUP BY\n 1\n-) AS t6\n+) AS \"t6\"\n ORDER BY\n- t6.cntrycode ASC\n\\ No newline at end of file\n+ \"t6\".\"cntrycode\" ASC\n\\ No newline at end of file\n"}
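The snapshot diff above shows the DuckDB TPC-H SQL being regenerated with every table and column identifier double-quoted. A minimal sketch of that effect, assuming a recent ibis version with `ibis.to_sql` and the post-change quoting behaviour; the table and column names below are illustrative only, not taken from the dataset:

```python
import ibis

# An unbound table standing in for the TPC-H "lineitem" relation used in the snapshots.
lineitem = ibis.table(
    {"l_orderkey": "int64", "l_quantity": "float64"},
    name="lineitem",
)

expr = lineitem.filter(lineitem.l_quantity > 300).select("l_orderkey")

# Compile to the DuckDB dialect; with the quoting change every identifier is wrapped,
# e.g. FROM "lineitem" AS "t0" rather than FROM lineitem AS t0.
sql = ibis.to_sql(expr, dialect="duckdb")
print(sql)
assert '"lineitem"' in str(sql)
```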
feat(migrations): add `params` to the `Migration.execute` method Related: #4099
f280e6d0d39dcd5a087497c7d533bec79b32f7cb
feat
https://github.com/mikro-orm/mikro-orm/commit/f280e6d0d39dcd5a087497c7d533bec79b32f7cb
add `params` to the `Migration.execute` method Related: #4099
{"Migration.ts": "@@ -34,8 +34,12 @@ export abstract class Migration {\n this.ctx = ctx;\n }\n \n- async execute(sql: Query) {\n- return this.driver.execute(sql, undefined, 'all', this.ctx);\n+ /**\n+ * Executes a raw SQL query. Accepts a string SQL or a knex query builder instance.\n+ * The `params` parameter is respected only if you use string SQL in the first parameter.\n+ */\n+ async execute(sql: Query, params?: unknown[]) {\n+ return this.driver.execute(sql, params, 'all', this.ctx);\n }\n \n getKnex() {\n"}
fix(datafusion): ensure that non-matching re_search calls return bool values when patterns do not match
088b027ea4b223a7d602d3a72d8d794183b3f8ec
fix
https://github.com/ibis-project/ibis/commit/088b027ea4b223a7d602d3a72d8d794183b3f8ec
ensure that non-matching re_search calls return bool values when patterns do not match
{"values.py": "@@ -11,6 +11,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n from ibis.backends.base.sqlglot import (\n+ FALSE,\n NULL,\n AggGen,\n F,\n@@ -441,7 +442,16 @@ def string_find(op, *, arg, substr, start, end, **_):\n \n @translate_val.register(ops.RegexSearch)\n def regex_search(op, *, arg, pattern, **_):\n- return F.array_length(F.regexp_match(arg, pattern)) > 0\n+ return if_(\n+ sg.or_(arg.is_(NULL), pattern.is_(NULL)),\n+ NULL,\n+ F.coalesce(\n+ # null is returned for non-matching patterns, so coalesce to false\n+ # because that is the desired behavior for ops.RegexSearch\n+ F.array_length(F.regexp_match(arg, pattern)) > 0,\n+ FALSE,\n+ ),\n+ )\n \n \n @translate_val.register(ops.StringContains)\n", "test_string.py": "@@ -2,6 +2,7 @@ from __future__ import annotations\n \n import contextlib\n \n+import numpy as np\n import pandas as pd\n import pytest\n import sqlalchemy as sa\n@@ -1090,4 +1091,6 @@ def test_no_conditional_percent_escape(con, expr):\n )\n def test_non_match_regex_search_is_false(con):\n expr = ibis.literal(\"foo\").re_search(\"bar\")\n- assert con.execute(expr) is False\n+ result = con.execute(expr)\n+ assert isinstance(result, (bool, np.bool_))\n+ assert not result\n"}
chore(deps): bump pyspark to 3.5 in poetry lock file
5fa088e88b53d46a858cf03503a831719cf78525
chore
https://github.com/rohankumardubey/ibis/commit/5fa088e88b53d46a858cf03503a831719cf78525
bump pyspark to 3.5 in poetry lock file
{"poetry.lock": "@@ -4483,13 +4483,13 @@ files = [\n \n [[package]]\n name = \"py4j\"\n-version = \"0.10.9.5\"\n+version = \"0.10.9.7\"\n description = \"Enables Python programs to dynamically access arbitrary Java objects\"\n optional = true\n python-versions = \"*\"\n files = [\n- {file = \"py4j-0.10.9.5-py2.py3-none-any.whl\", hash = \"sha256:52d171a6a2b031d8a5d1de6efe451cf4f5baff1a2819aabc3741c8406539ba04\"},\n- {file = \"py4j-0.10.9.5.tar.gz\", hash = \"sha256:276a4a3c5a2154df1860ef3303a927460e02e97b047dc0a47c1c3fb8cce34db6\"},\n+ {file = \"py4j-0.10.9.7-py2.py3-none-any.whl\", hash = \"sha256:85defdfd2b2376eb3abf5ca6474b51ab7e0de341c75a02f46dc9b5976f5a5c1b\"},\n+ {file = \"py4j-0.10.9.7.tar.gz\", hash = \"sha256:0b6e5315bb3ada5cf62ac651d107bb2ebc02def3dee9d9548e3baac644ea8dbb\"},\n ]\n \n [[package]]\n@@ -5044,22 +5044,23 @@ files = [\n \n [[package]]\n name = \"pyspark\"\n-version = \"3.3.4\"\n+version = \"3.5.0\"\n description = \"Apache Spark Python API\"\n optional = true\n-python-versions = \">=3.7\"\n+python-versions = \">=3.8\"\n files = [\n- {file = \"pyspark-3.3.4.tar.gz\", hash = \"sha256:1f866be47130a522355240949ed50d9812a8f327bd7619f043ffe07fbcf7f7b6\"},\n+ {file = \"pyspark-3.5.0.tar.gz\", hash = \"sha256:d41a9b76bd2aca370a6100d075c029e22ba44c5940927877e9435a3a9c566558\"},\n ]\n \n [package.dependencies]\n-py4j = \"0.10.9.5\"\n+py4j = \"0.10.9.7\"\n \n [package.extras]\n+connect = [\"googleapis-common-protos (>=1.56.4)\", \"grpcio (>=1.56.0)\", \"grpcio-status (>=1.56.0)\", \"numpy (>=1.15)\", \"pandas (>=1.0.5)\", \"pyarrow (>=4.0.0)\"]\n ml = [\"numpy (>=1.15)\"]\n mllib = [\"numpy (>=1.15)\"]\n-pandas-on-spark = [\"numpy (>=1.15)\", \"pandas (>=1.0.5)\", \"pyarrow (>=1.0.0)\"]\n-sql = [\"pandas (>=1.0.5)\", \"pyarrow (>=1.0.0)\"]\n+pandas-on-spark = [\"numpy (>=1.15)\", \"pandas (>=1.0.5)\", \"pyarrow (>=4.0.0)\"]\n+sql = [\"numpy (>=1.15)\", \"pandas (>=1.0.5)\", \"pyarrow (>=4.0.0)\"]\n \n [[package]]\n name = \"pystac\"\n", "requirements-dev.txt": "@@ -169,7 +169,7 @@ ptyprocess==0.7.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pure-eval==0.2.2 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pure-sasl==0.6.2 ; python_version >= \"3.9\" and python_version < \"4.0\"\n py-cpuinfo==9.0.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n-py4j==0.10.9.5 ; python_version >= \"3.9\" and python_version < \"4.0\"\n+py4j==0.10.9.7 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pyarrow-hotfix==0.6 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pyarrow==15.0.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pyasn1-modules==0.3.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n@@ -192,7 +192,7 @@ pyproj==3.6.1 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pyproject-hooks==1.0.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pyshp==2.3.1 ; python_version >= \"3.10\" and python_version < \"3.13\"\n pysocks==1.7.1 ; python_version >= \"3.10\" and python_version < \"3.13\"\n-pyspark==3.3.4 ; python_version >= \"3.9\" and python_version < \"4.0\"\n+pyspark==3.5.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n pystac-client==0.7.5 ; python_version >= \"3.10\" and python_version < \"3.13\"\n pystac[validation]==1.9.0 ; python_version >= \"3.10\" and python_version < \"3.13\"\n pytest-benchmark==4.0.0 ; python_version >= \"3.9\" and python_version < \"4.0\"\n"}
chore: try to fix extraction of client version from package.json (#1204)
904b847ea98bb58de54dd23630043b859cb1d882
chore
https://github.com/Hardeepex/crawlee/commit/904b847ea98bb58de54dd23630043b859cb1d882
try to fix extraction of client version from package.json (#1204)
{"package.json": "@@ -55,7 +55,7 @@\n \"@apify/ps-tree\": \"^1.1.4\",\n \"@apify/storage-local\": \"^2.0.1\",\n \"@apify/utilities\": \"^1.1.2\",\n- \"apify-client\": \"^2.0.1\",\n+ \"apify-client\": \"^2.0.2\",\n \"browser-pool\": \"^2.0.2\",\n \"cheerio\": \"1.0.0-rc.10\",\n \"content-type\": \"^1.0.4\",\n"}
perf: cache generalization results
8722bb3dd0d58dd60a5fbd291682e7927eb523cd
perf
https://github.com/erg-lang/erg/commit/8722bb3dd0d58dd60a5fbd291682e7927eb523cd
cache generalization results
{"generalize.rs": "@@ -8,6 +8,7 @@ use erg_common::{dict, fn_name, get_hash, set};\n #[allow(unused_imports)]\n use erg_common::{fmt_vec, log};\n \n+use crate::module::GeneralizationResult;\n use crate::ty::constructors::*;\n use crate::ty::free::{CanbeFree, Constraint, Free, HasLevel};\n use crate::ty::typaram::{TyParam, TyParamLambda};\n@@ -772,7 +773,7 @@ impl<'c, 'q, 'l, L: Locational> Dereferencer<'c, 'q, 'l, L> {\n false\n }\n };\n- let res = self.validate_subsup(sub_t, super_t);\n+ let res = self.validate_subsup(sub_t, super_t, &fv);\n if dummy {\n fv.undo();\n } else {\n@@ -972,7 +973,12 @@ impl<'c, 'q, 'l, L: Locational> Dereferencer<'c, 'q, 'l, L> {\n }\n }\n \n- fn validate_subsup(&mut self, sub_t: Type, super_t: Type) -> TyCheckResult<Type> {\n+ fn validate_subsup(\n+ &mut self,\n+ sub_t: Type,\n+ super_t: Type,\n+ fv: &Free<Type>,\n+ ) -> TyCheckResult<Type> {\n // TODO: Subr, ...\n match (sub_t, super_t) {\n /*(sub_t @ Type::Refinement(_), super_t @ Type::Refinement(_)) => {\n@@ -1016,16 +1022,30 @@ impl<'c, 'q, 'l, L: Locational> Dereferencer<'c, 'q, 'l, L> {\n }\n Ok(poly(rn, tps))\n }\n- (sub_t, super_t) => self.validate_simple_subsup(sub_t, super_t),\n+ (sub_t, super_t) => self.validate_simple_subsup(sub_t, super_t, fv),\n }\n }\n \n- fn validate_simple_subsup(&mut self, sub_t: Type, super_t: Type) -> TyCheckResult<Type> {\n- if self.ctx.is_trait(&super_t) {\n+ fn validate_simple_subsup(\n+ &mut self,\n+ sub_t: Type,\n+ super_t: Type,\n+ fv: &Free<Type>,\n+ ) -> TyCheckResult<Type> {\n+ let opt_res = self.ctx.shared().gen_cache.get(fv);\n+ if opt_res.is_none() && self.ctx.is_trait(&super_t) {\n self.ctx\n .check_trait_impl(&sub_t, &super_t, self.qnames, self.loc)?;\n }\n- let is_subtype = self.ctx.subtype_of(&sub_t, &super_t);\n+ let is_subtype = opt_res.map(|res| res.is_subtype).unwrap_or_else(|| {\n+ let is_subtype = self.ctx.subtype_of(&sub_t, &super_t); // PERF NOTE: bottleneck\n+ let res = GeneralizationResult {\n+ is_subtype,\n+ impl_trait: true,\n+ };\n+ self.ctx.shared().gen_cache.insert(fv.clone(), res);\n+ is_subtype\n+ });\n let sub_t = self.deref_tyvar(sub_t)?;\n let super_t = self.deref_tyvar(super_t)?;\n if sub_t == super_t {\n", "cache.rs": "@@ -15,6 +15,7 @@ use erg_parser::ast::Module;\n use crate::build_package::CheckStatus;\n use crate::context::ModuleContext;\n use crate::hir::HIR;\n+use crate::ty::free::FreeTyVar;\n \n #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n pub struct ModId(usize);\n@@ -395,3 +396,26 @@ impl SharedModuleCache {\n ref_.iter()\n }\n }\n+\n+#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n+pub struct GeneralizationResult {\n+ pub impl_trait: bool,\n+ pub is_subtype: bool,\n+}\n+\n+#[derive(Debug, Clone, Default)]\n+pub struct SharedGeneralizationCache(Shared<Dict<FreeTyVar, GeneralizationResult>>);\n+\n+impl SharedGeneralizationCache {\n+ pub fn new() -> Self {\n+ Self(Shared::new(Dict::new()))\n+ }\n+\n+ pub fn insert(&self, key: FreeTyVar, res: GeneralizationResult) {\n+ self.0.borrow_mut().insert(key, res);\n+ }\n+\n+ pub fn get(&self, key: &FreeTyVar) -> Option<GeneralizationResult> {\n+ self.0.borrow().get(key).cloned()\n+ }\n+}\n", "global.rs": "@@ -5,7 +5,7 @@ use erg_common::spawn::safe_yield;\n \n use crate::context::{Context, ModuleContext};\n \n-use super::cache::{ModuleEntry, SharedModuleCache};\n+use super::cache::{ModuleEntry, SharedGeneralizationCache, SharedModuleCache};\n use super::errors::{SharedCompileErrors, SharedCompileWarnings};\n use super::graph::SharedModuleGraph;\n use 
super::impls::SharedTraitImpls;\n@@ -25,6 +25,7 @@ pub struct SharedCompilerResource {\n pub promises: SharedPromises,\n pub errors: SharedCompileErrors,\n pub warns: SharedCompileWarnings,\n+ pub gen_cache: SharedGeneralizationCache,\n }\n \n impl SharedCompilerResource {\n@@ -41,6 +42,7 @@ impl SharedCompilerResource {\n promises: SharedPromises::new(graph, NormalizedPathBuf::from(cfg.input.path())),\n errors: SharedCompileErrors::new(),\n warns: SharedCompileWarnings::new(),\n+ gen_cache: SharedGeneralizationCache::new(),\n };\n Context::init_builtins(cfg, self_.clone());\n self_\n"}
fix: a subtyping bug
70510f6ae1a1021fc90aa9ea8935dce383fe0888
fix
https://github.com/erg-lang/erg/commit/70510f6ae1a1021fc90aa9ea8935dce383fe0888
a subtyping bug
{"compare.rs": "@@ -376,10 +376,12 @@ impl Context {\n && var_params_judge\n && default_check() // contravariant\n }\n+ // ?T(<: Int) :> ?U(:> Nat)\n+ // ?T(<: Int) :> ?U(:> Int)\n // ?T(<: Nat) !:> ?U(:> Int) (if the upper bound of LHS is smaller than the lower bound of RHS, LHS cannot not be a supertype)\n // ?T(<: Nat) :> ?U(<: Int) (?U can be smaller than ?T)\n (FreeVar(lfv), FreeVar(rfv)) => match (lfv.get_subsup(), rfv.get_subsup()) {\n- (Some((_, l_sup)), Some((r_sub, _))) => !self.subtype_of(&l_sup, &r_sub),\n+ (Some((_, l_sup)), Some((r_sub, _))) => self.supertype_of(&l_sup, &r_sub),\n _ => {\n if lfv.is_linked() {\n self.supertype_of(&lfv.crack(), rhs)\n", "unify.rs": "@@ -725,29 +725,56 @@ impl Context {\n )));\n }\n }\n- let new_constraint = Constraint::new_sandwiched(union, intersec);\n- match sub_fv\n- .level()\n- .unwrap_or(GENERIC_LEVEL)\n- .cmp(&sup_fv.level().unwrap_or(GENERIC_LEVEL))\n- {\n- std::cmp::Ordering::Less => {\n- sub_fv.update_constraint(new_constraint, false);\n- sup_fv.link(maybe_sub);\n- }\n- std::cmp::Ordering::Greater => {\n- sup_fv.update_constraint(new_constraint, false);\n- sub_fv.link(maybe_sup);\n- }\n- std::cmp::Ordering::Equal => {\n- // choose named one\n- if sup_fv.is_named_unbound() {\n- sup_fv.update_constraint(new_constraint, false);\n+ if union == intersec {\n+ match sub_fv\n+ .level()\n+ .unwrap_or(GENERIC_LEVEL)\n+ .cmp(&sup_fv.level().unwrap_or(GENERIC_LEVEL))\n+ {\n+ std::cmp::Ordering::Less => {\n+ sub_fv.link(&union);\n+ sup_fv.link(maybe_sub);\n+ }\n+ std::cmp::Ordering::Greater => {\n+ sup_fv.link(&union);\n sub_fv.link(maybe_sup);\n- } else {\n+ }\n+ std::cmp::Ordering::Equal => {\n+ // choose named one\n+ if sup_fv.is_named_unbound() {\n+ sup_fv.link(&union);\n+ sub_fv.link(maybe_sup);\n+ } else {\n+ sub_fv.link(&union);\n+ sup_fv.link(maybe_sub);\n+ }\n+ }\n+ }\n+ } else {\n+ let new_constraint = Constraint::new_sandwiched(union, intersec);\n+ match sub_fv\n+ .level()\n+ .unwrap_or(GENERIC_LEVEL)\n+ .cmp(&sup_fv.level().unwrap_or(GENERIC_LEVEL))\n+ {\n+ std::cmp::Ordering::Less => {\n sub_fv.update_constraint(new_constraint, false);\n sup_fv.link(maybe_sub);\n }\n+ std::cmp::Ordering::Greater => {\n+ sup_fv.update_constraint(new_constraint, false);\n+ sub_fv.link(maybe_sup);\n+ }\n+ std::cmp::Ordering::Equal => {\n+ // choose named one\n+ if sup_fv.is_named_unbound() {\n+ sup_fv.update_constraint(new_constraint, false);\n+ sub_fv.link(maybe_sup);\n+ } else {\n+ sub_fv.update_constraint(new_constraint, false);\n+ sup_fv.link(maybe_sub);\n+ }\n+ }\n }\n }\n Ok(())\n", "with.er": "@@ -1,6 +1,10 @@\n with! open!(\"examples/record.er\"), f =>\n print! f.read!()\n \n+with! open!(\"examples/set.er\"), f =>\n+ for! f.readlines!(), line =>\n+ print!(\"line: \" + line)\n+\n open_file!(path) =\n with! open!(path, mode:=\"r\" , encoding:=\"utf_8\"), f =>\n data = f.read!()\n"}
fix: re-export `gix_ignore` as `ignore` as it's part of the public API.
b839b5d0190c3d687c289a87c8ff2e44b363b049
fix
https://github.com/Byron/gitoxide/commit/b839b5d0190c3d687c289a87c8ff2e44b363b049
re-export `gix_ignore` as `ignore` as it's part of the public API.
{"lib.rs": "@@ -18,6 +18,8 @@ pub use gix_glob as glob;\n /// Provides types needed for using [`stack::Platform::excluded_kind()`].\n pub use gix_ignore as ignore;\n /// Provides types needed for using [`Stack::at_path()`] and [`Stack::at_entry()`].\n+pub use gix_index as index;\n+/// Provides types needed for using [`Stack::at_path()`] and [`Stack::at_entry()`].\n pub use gix_object as object;\n /// Provides types needed for using [`stack::State::for_checkout()`].\n #[cfg(feature = \"attributes\")]\n"}
ci: use an output instead of the exit code [skip ci]
bccefa936904524b12e66c984c62be9b51098961
ci
https://github.com/ibis-project/ibis/commit/bccefa936904524b12e66c984c62be9b51098961
use an output instead of the exit code [skip ci]
{"pr-title.yml": "@@ -27,7 +27,13 @@ jobs:\n run: npm install \"@commitlint/config-conventional\"\n \n - name: run commitlint\n- run: npx commitlint --extends \"@commitlint/config-conventional\" --verbose <<< \"$COMMIT_MSG\"\n+ id: lint\n+ run: |\n+ failed=0\n+ if ! npx commitlint --extends \"@commitlint/config-conventional\" --verbose <<< \"$COMMIT_MSG\"; then\n+ failed=1\n+ fi\n+ echo \"failed=$failed\" >> \"$GITHUB_OUTPUT\"\n env:\n COMMIT_MSG: |\n ${{ github.event.pull_request.title }}\n@@ -35,7 +41,7 @@ jobs:\n ${{ github.event.pull_request.body }}\n \n - name: find existing comment\n- if: failure()\n+ if: steps.lint.outputs.failed == '1'\n uses: peter-evans/find-comment@v2\n id: fc\n with:\n@@ -43,7 +49,7 @@ jobs:\n body-regex: '\\*\\*ACTION NEEDED\\*\\*.+'\n \n - name: post a message if the pull request title and body fail `commitlint`\n- if: success() && steps.fc.outputs.comment-body == ''\n+ if: steps.lint.outputs.failed == '1' && steps.fc.outputs.comment-body == ''\n uses: peter-evans/create-or-update-comment@v3\n with:\n issue-number: ${{ github.event.pull_request.number }}\n"}
fix: implement `Clone` for `PartialName`. Also improve documentation for `FullName*` and `PartialName*` types.
8e8c71ba1238f0c8177f84810c2d9ed165010d7e
fix
https://github.com/Byron/gitoxide/commit/8e8c71ba1238f0c8177f84810c2d9ed165010d7e
implement `Clone` for `PartialName`. Also improve documentation for `FullName*` and `PartialName*` types.
{"lib.rs": "@@ -104,28 +104,27 @@ pub(crate) struct Store {\n inner: store::State,\n }\n \n-/// Indicate that the given BString is a validate reference name or path that can be used as path on disk or written as target\n-/// of a symbolic reference\n+/// A validated complete and fully qualified referenced reference name, safe to use for all operations.\n #[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]\n #[cfg_attr(feature = \"serde1\", derive(serde::Serialize, serde::Deserialize))]\n pub struct FullName(pub(crate) BString);\n \n-/// A validated and potentially partial reference name - it can safely be used for common operations.\n+/// A validated complete and fully qualified referenced reference name, safe to use for all operations.\n #[derive(Hash, Debug, PartialEq, Eq, Ord, PartialOrd)]\n #[repr(transparent)]\n pub struct FullNameRef(BStr);\n \n-/// A validated complete and fully qualified reference name, safe to use for all operations.\n+/// A validated and potentially partial reference name, safe to use for common operations.\n #[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]\n pub struct PartialNameCow<'a>(Cow<'a, BStr>);\n \n-/// A validated complete and fully qualified referenced reference name, safe to use for all operations.\n+/// A validated and potentially partial reference name, safe to use for common operations.\n #[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]\n #[repr(transparent)]\n pub struct PartialNameRef(BStr);\n \n-/// A validated complete and fully qualified owned reference name, safe to use for all operations.\n-#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd)]\n+/// A validated and potentially partial reference name, safe to use for common operations.\n+#[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)]\n pub struct PartialName(BString);\n \n /// A _validated_ prefix for references to act as a namespace.\n"}
fix(mongo): restrict object id conversion only to known properties Related: #401
86cd0277d048021ee244c3cf3537afc187d69683
fix
https://github.com/mikro-orm/mikro-orm/commit/86cd0277d048021ee244c3cf3537afc187d69683
restrict object id conversion only to known properties Related: #401
{"MongoConnection.ts": "@@ -70,7 +70,6 @@ export class MongoConnection extends Connection {\n \n async find<T extends AnyEntity<T>>(collection: string, where: FilterQuery<T>, orderBy?: QueryOrderMap, limit?: number, offset?: number, fields?: string[], ctx?: Transaction<ClientSession>): Promise<T[]> {\n collection = this.getCollectionName(collection);\n- where = this.convertObjectIds(where as Dictionary);\n const options: Dictionary = { session: ctx };\n \n if (fields) {\n@@ -156,7 +155,6 @@ export class MongoConnection extends Connection {\n \n private async runQuery<T extends { _id: any }, U extends QueryResult | number = QueryResult>(method: 'insertOne' | 'updateMany' | 'deleteMany' | 'countDocuments', collection: string, data?: Partial<T>, where?: FilterQuery<T>, ctx?: Transaction<ClientSession>): Promise<U> {\n collection = this.getCollectionName(collection);\n- where = this.convertObjectIds(where as Dictionary);\n const options: Dictionary = { session: ctx };\n const now = Date.now();\n let res: InsertOneWriteOpResult<T> | UpdateWriteOpResult | DeleteWriteOpResultObject | number;\n@@ -188,28 +186,6 @@ export class MongoConnection extends Connection {\n return this.transformResult(res!) as U;\n }\n \n- private convertObjectIds<T extends ObjectId | Dictionary | any[]>(payload: T): T {\n- if (payload instanceof ObjectId) {\n- return payload;\n- }\n-\n- if (Utils.isString(payload) && payload.match(/^[0-9a-f]{24}$/i)) {\n- return new ObjectId(payload) as T;\n- }\n-\n- if (Array.isArray(payload)) {\n- return payload.map((item: any) => this.convertObjectIds(item)) as T;\n- }\n-\n- if (Utils.isObject(payload)) {\n- Object.keys(payload).forEach(k => {\n- payload[k] = this.convertObjectIds(payload[k]);\n- });\n- }\n-\n- return payload;\n- }\n-\n private transformResult(res: any): QueryResult {\n return {\n affectedRows: res.modifiedCount || res.deletedCount || 0,\n", "MongoDriver.ts": "@@ -1,11 +1,12 @@\n import { ClientSession, ObjectId } from 'mongodb';\n import { DatabaseDriver } from './DatabaseDriver';\n import { MongoConnection } from '../connections/MongoConnection';\n-import { EntityData, AnyEntity, FilterQuery, EntityMetadata, EntityProperty, Dictionary } from '../typings';\n+import { AnyEntity, Dictionary, EntityData, EntityMetadata, EntityProperty, FilterQuery } from '../typings';\n import { Configuration, Utils } from '../utils';\n import { MongoPlatform } from '../platforms/MongoPlatform';\n import { FindOneOptions, FindOptions } from './IDatabaseDriver';\n import { QueryResult, Transaction } from '../connections';\n+import { ReferenceType } from '../entity';\n \n export class MongoDriver extends DatabaseDriver<MongoConnection> {\n \n@@ -166,10 +167,46 @@ export class MongoDriver extends DatabaseDriver<MongoConnection> {\n if (prop.fieldNames) {\n Utils.renameKey(data, k, prop.fieldNames[0]);\n }\n+\n+ let isObjectId: boolean;\n+\n+ if (prop.reference === ReferenceType.SCALAR) {\n+ isObjectId = prop.type.toLowerCase() === 'objectid';\n+ } else {\n+ const meta2 = this.metadata.get(prop.type);\n+ const pk = meta2.properties[meta2.primaryKeys[0]];\n+ isObjectId = pk.type.toLowerCase() === 'objectid';\n+ }\n+\n+ if (isObjectId) {\n+ data[k] = this.convertObjectIds(data[k]);\n+ }\n }\n });\n \n return data;\n }\n \n+ private convertObjectIds<T extends ObjectId | Dictionary | any[]>(data: T): T {\n+ if (data instanceof ObjectId) {\n+ return data;\n+ }\n+\n+ if (Utils.isString(data) && data.match(/^[0-9a-f]{24}$/i)) {\n+ return new ObjectId(data) as T;\n+ }\n+\n+ if (Array.isArray(data)) 
{\n+ return data.map((item: any) => this.convertObjectIds(item)) as T;\n+ }\n+\n+ if (Utils.isObject(data)) {\n+ Object.keys(data).forEach(k => {\n+ data[k] = this.convertObjectIds(data[k]);\n+ });\n+ }\n+\n+ return data;\n+ }\n+\n }\n", "GH401.test.ts": "@@ -11,6 +11,9 @@ class Entity401 {\n @Property()\n data: Dictionary;\n \n+ @Property()\n+ bar?: string;\n+\n constructor(data = {}) {\n this.data = data;\n }\n@@ -35,14 +38,24 @@ describe('GH issue 401', () => {\n afterAll(() => orm.close(true));\n \n test('do not automatically convert string to ObjectId in the all cases', async () => {\n- const a = new Entity401({ foo: '0000007b5c9c61c332380f78' });\n- expect(a.data.foo).toBe('0000007b5c9c61c332380f78');\n+ const id = '0000007b5c9c61c332380f78';\n+ const a = new Entity401({ foo: id });\n+ a.bar = id;\n+ expect(a.data.foo).toBe(id);\n+ expect(a.bar).toBe(id);\n await orm.em.persistAndFlush(a);\n expect(a.data.foo).not.toBeInstanceOf(ObjectId);\n+ expect(a.bar).not.toBeInstanceOf(ObjectId);\n orm.em.clear();\n \n const getA = await orm.em.findOneOrFail(Entity401, a._id);\n expect(getA!.data.foo).not.toBeInstanceOf(ObjectId);\n+ expect(getA!.bar).not.toBeInstanceOf(ObjectId);\n+ orm.em.clear();\n+\n+ const getA2 = await orm.em.findOneOrFail(Entity401, { bar: id });\n+ expect(getA2!.data.foo).not.toBeInstanceOf(ObjectId);\n+ expect(getA2!.bar).not.toBeInstanceOf(ObjectId);\n });\n \n });\n"}
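In the mikro-orm record above, the blanket `convertObjectIds` pass over every query and payload is removed from `MongoConnection` and re-applied in `MongoDriver.renameFields` only for properties whose declared type (or whose referenced entity's primary-key type) is `ObjectId`, so 24-hex-character strings stored in free-form fields like `data.foo` stay strings. The following is a rough Python sketch of that decision rule, not MikroORM's API; the `Prop` metadata is invented to mirror the `Entity401` test fixture.

```python
import re
from dataclasses import dataclass

OBJECT_ID_RE = re.compile(r"^[0-9a-f]{24}$", re.IGNORECASE)

@dataclass
class Prop:
    reference: str  # "scalar" or a relation kind
    type: str       # declared type, or the referenced entity's PK type

# Invented metadata mirroring the Entity401 fixture from the regression test:
PROPS = {
    "_id": Prop("scalar", "ObjectId"),     # real ObjectId column -> convert
    "data": Prop("scalar", "Dictionary"),  # free-form JSON -> leave as-is
    "bar": Prop("scalar", "string"),       # plain string -> leave as-is
}

def should_convert(prop: Prop) -> bool:
    # the driver now consults the mapped type instead of pattern-matching every value
    return prop.type.lower() == "objectid"

def rename_fields(data: dict) -> dict:
    out = {}
    for key, value in data.items():
        prop = PROPS.get(key)
        if prop and should_convert(prop) and isinstance(value, str) and OBJECT_ID_RE.match(value):
            value = ("ObjectId", value)  # stand-in for bson.ObjectId(value)
        out[key] = value
    return out

hex_id = "0000007b5c9c61c332380f78"
print(rename_fields({"_id": hex_id, "bar": hex_id, "data": {"foo": hex_id}}))
# only `_id` is wrapped; `bar` and the nested `data.foo` keep their string values
```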
feat: add `ident::apply()` to substitute `$Id$` with `$Id: <hex>$`
306c8eabcffe80da1d627283c4b188a1b979f692
feat
https://github.com/Byron/gitoxide/commit/306c8eabcffe80da1d627283c4b188a1b979f692
add `ident::apply()` to substitute `$Id$` with `$Id: <hex>$`
{"Cargo.lock": "@@ -1650,6 +1650,8 @@ name = \"gix-filter\"\n version = \"0.0.0\"\n dependencies = [\n \"bstr\",\n+ \"gix-hash 0.11.3\",\n+ \"gix-object 0.31.0\",\n ]\n \n [[package]]\n", "Cargo.toml": "@@ -12,4 +12,7 @@ rust-version = \"1.65\"\n doctest = false\n \n [dependencies]\n+gix-hash = { version = \"^0.11.3\", path = \"../gix-hash\" }\n+gix-object = { version = \"^0.31.0\", path = \"../gix-object\" }\n+\n bstr = { version = \"1.5.0\", default-features = false, features = [\"std\"] }\n", "lib.rs": "@@ -44,4 +44,30 @@ pub mod ident {\n }\n input\n }\n+\n+ /// Substitute all occurrences of `$Id$` with `$Id: <hexsha-of-input>$` if present and return the changed buffer, with `object_hash`\n+ /// being used accordingly.\n+ ///\n+ /// ### Deviation\n+ ///\n+ /// `Git` also tries to cleanup 'stray' substituted `$Id: <hex>$`, but we don't do that, sticking exactly to what ought to be done.\n+ /// The respective code is up to 16 years old and one might assume that `git` by now handles checking and checkout filters correctly.\n+ pub fn apply(mut input: Cow<'_, BStr>, object_hash: gix_hash::Kind) -> Cow<'_, BStr> {\n+ let mut buf: [u8; b\": $\".len() + gix_hash::Kind::longest().len_in_hex()] = std::array::from_fn(|_| 0);\n+ let mut id = None;\n+ let mut ofs = 0;\n+ while let Some(pos) = input[ofs..].find(b\"$Id$\") {\n+ let id = id.get_or_insert_with(|| gix_object::compute_hash(object_hash, gix_object::Kind::Blob, &input));\n+\n+ buf[..2].copy_from_slice(b\": \");\n+ let _ = id.hex_to_buf(&mut buf[2..][..object_hash.len_in_hex()]);\n+ let replaced_id = &mut buf[..2 + object_hash.len_in_hex() + 1];\n+ *replaced_id.last_mut().expect(\"present\") = b'$';\n+ input\n+ .to_mut()\n+ .replace_range((ofs + pos + 3)..(ofs + pos + 4), &*replaced_id);\n+ ofs += pos + 3 + replaced_id.len();\n+ }\n+ input\n+ }\n }\n", "mod.rs": "@@ -56,3 +56,52 @@ mod undo {\n );\n }\n }\n+\n+mod apply {\n+ use crate::ident::cowstr;\n+ use gix_filter::ident;\n+ use std::borrow::Cow;\n+\n+ #[test]\n+ fn no_change() {\n+ for input_no_match in [\n+ \"\",\n+ \"nothing\",\n+ \"$ID$ case sensitive matching\",\n+ \"$Id: expanded is ignored$\",\n+ ] {\n+ let res = ident::apply(cowstr(input_no_match), gix_hash::Kind::Sha1);\n+ assert!(\n+ matches!(res, Cow::Borrowed(_)),\n+ \"no substitution happens, so no mutable version of the Cow is created\"\n+ );\n+ assert_eq!(res.as_ref(), input_no_match, \"there definitely is no change\");\n+ }\n+ }\n+\n+ #[test]\n+ fn simple() {\n+ assert_eq!(\n+ ident::apply(cowstr(\"$Id$\"), gix_hash::Kind::Sha1).as_ref(),\n+ \"$Id: b3f5ebfb5843bc43ceecff6d4f26bb37c615beb1$\"\n+ );\n+\n+ assert_eq!(\n+ ident::apply(cowstr(\"$Id$ $Id$\"), gix_hash::Kind::Sha1).as_ref(),\n+ \"$Id: f6f3176060328ef7030a8b8eeda57fbf0587b2f9$ $Id: f6f3176060328ef7030a8b8eeda57fbf0587b2f9$\"\n+ );\n+ }\n+\n+ #[test]\n+ fn round_trips() {\n+ for input in [\n+ \"hi\\n$Id$\\nho\\n\\t$Id$$Id$$Id$\",\n+ \"$Id$\",\n+ \"$Id$ and one more $Id$ and done\",\n+ ] {\n+ let res = ident::apply(cowstr(input), gix_hash::Kind::Sha1);\n+ assert_ne!(res.as_ref(), input, \"the input was rewritten\");\n+ assert_eq!(ident::undo(res).as_ref(), input, \"the filter can be undone perfectly\");\n+ }\n+ }\n+}\n"}
feat(duckdb): enable find_in_set test
377023d390b96492508405fcc0fcb172680015c6
feat
https://github.com/rohankumardubey/ibis/commit/377023d390b96492508405fcc0fcb172680015c6
enable find_in_set test
{"test_string.py": "@@ -128,7 +128,7 @@ def test_string_col_is_unicode(alltypes, df):\n lambda t: t.string_col.str.find('1'),\n id='find_in_set',\n marks=pytest.mark.notimpl(\n- [\"datafusion\", \"duckdb\", \"mysql\", \"pyspark\", \"sqlite\"]\n+ [\"datafusion\", \"mysql\", \"pyspark\", \"sqlite\"]\n ),\n ),\n param(\n@@ -136,7 +136,7 @@ def test_string_col_is_unicode(alltypes, df):\n lambda t: t.string_col.str.find('a'),\n id='find_in_set_all_missing',\n marks=pytest.mark.notimpl(\n- [\"datafusion\", \"duckdb\", \"mysql\", \"pyspark\", \"sqlite\"]\n+ [\"datafusion\", \"mysql\", \"pyspark\", \"sqlite\"]\n ),\n ),\n param(\n"}
feat(api): add `to_sqlglot` method to `Schema` objects (#10063)
9488115b588ebf6ba0814ebbac9937c8bfc8b517
feat
https://github.com/rohankumardubey/ibis/commit/9488115b588ebf6ba0814ebbac9937c8bfc8b517
add `to_sqlglot` method to `Schema` objects (#10063)
{"__init__.py": "@@ -334,20 +334,7 @@ class Backend(SQLBackend, UrlFromPath):\n return table.to_reader(max_chunksize=chunk_size)\n \n def _generate_create_table(self, table: sge.Table, schema: sch.Schema):\n- column_defs = [\n- sge.ColumnDef(\n- this=sg.to_identifier(colname, quoted=self.compiler.quoted),\n- kind=self.compiler.type_mapper.from_ibis(typ),\n- constraints=(\n- None\n- if typ.nullable\n- else [sge.ColumnConstraint(kind=sge.NotNullColumnConstraint())]\n- ),\n- )\n- for colname, typ in schema.items()\n- ]\n-\n- target = sge.Schema(this=table, expressions=column_defs)\n+ target = sge.Schema(this=table, expressions=schema.to_sqlglot(self.dialect))\n \n return sge.Create(kind=\"TABLE\", this=target)\n \n", "base.py": "@@ -32,6 +32,7 @@ from ibis.backends.sql.rewrites import (\n from ibis.config import options\n from ibis.expr.operations.udf import InputType\n from ibis.expr.rewrites import lower_stringslice\n+from ibis.util import get_subclasses\n \n try:\n from sqlglot.expressions import Alter\n@@ -51,15 +52,7 @@ if TYPE_CHECKING:\n from ibis.backends.sql.datatypes import SqlglotType\n \n \n-def get_leaf_classes(op):\n- for child_class in op.__subclasses__():\n- if not child_class.__subclasses__():\n- yield child_class\n- else:\n- yield from get_leaf_classes(child_class)\n-\n-\n-ALL_OPERATIONS = frozenset(get_leaf_classes(ops.Node))\n+ALL_OPERATIONS = frozenset(get_subclasses(ops.Node))\n \n \n class AggGen:\n", "datatypes.py": "@@ -10,6 +10,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n from ibis.common.collections import FrozenDict\n from ibis.formats import TypeMapper\n+from ibis.util import get_subclasses\n \n typecode = sge.DataType.Type\n \n@@ -493,6 +494,7 @@ class RisingWaveType(PostgresType):\n \n \n class DataFusionType(PostgresType):\n+ dialect = \"datafusion\"\n unknown_type_strings = {\n \"utf8\": dt.string,\n \"float64\": dt.float64,\n@@ -1156,3 +1158,9 @@ class FlinkType(SqlglotType):\n ],\n nested=True,\n )\n+\n+\n+TYPE_MAPPERS = {\n+ mapper.dialect: mapper\n+ for mapper in set(get_subclasses(SqlglotType)) - {SqlglotType, BigQueryUDFType}\n+}\n", "test_core.py": "@@ -13,6 +13,7 @@ import ibis.expr.datatypes as dt\n from ibis.common.annotations import ValidationError\n from ibis.common.patterns import As, Attrs, NoMatch, Pattern\n from ibis.common.temporal import TimestampUnit, TimeUnit\n+from ibis.util import get_subclasses\n \n \n def test_validate_type():\n@@ -541,15 +542,9 @@ def test_timestamp_from_unit():\n )\n \n \n-def get_leaf_classes(op):\n- for child_class in op.__subclasses__():\n- yield child_class\n- yield from get_leaf_classes(child_class)\n-\n-\n @pytest.mark.parametrize(\n \"dtype_class\",\n- set(get_leaf_classes(dt.DataType))\n+ set(get_subclasses(dt.DataType))\n - {\n # these require special case tests\n dt.Array,\n", "schema.py": "@@ -16,6 +16,9 @@ from ibis.util import indent\n if TYPE_CHECKING:\n from typing import TypeAlias\n \n+ import sqlglot as sg\n+ import sqlglot.expressions as sge\n+\n \n class Schema(Concrete, Coercible, MapSet):\n \"\"\"An ordered mapping of str -> [datatype](./datatypes.qmd), used to hold a [Table](./expression-tables.qmd#ibis.expr.tables.Table)'s schema.\"\"\"\n@@ -226,6 +229,74 @@ class Schema(Concrete, Coercible, MapSet):\n \"\"\"\n return self.names[i]\n \n+ def to_sqlglot(self, dialect: str | sg.Dialect) -> list[sge.ColumnDef]:\n+ \"\"\"Convert the schema to a list of SQL column definitions.\n+\n+ Parameters\n+ ----------\n+ dialect\n+ The SQL dialect to use.\n+\n+ Returns\n+ 
-------\n+ list[sqlglot.expressions.ColumnDef]\n+ A list of SQL column definitions.\n+\n+ Examples\n+ --------\n+ >>> import ibis\n+ >>> sch = ibis.schema({\"a\": \"int\", \"b\": \"!string\"})\n+ >>> sch\n+ ibis.Schema {\n+ a int64\n+ b !string\n+ }\n+ >>> columns = sch.to_sqlglot(dialect=\"duckdb\")\n+ >>> columns\n+ [ColumnDef(\n+ this=Identifier(this=a, quoted=True),\n+ kind=DataType(this=Type.BIGINT)), ColumnDef(\n+ this=Identifier(this=b, quoted=True),\n+ kind=DataType(this=Type.VARCHAR),\n+ constraints=[\n+ ColumnConstraint(\n+ kind=NotNullColumnConstraint())])]\n+\n+ One use case for this method is to embed its output into a SQLGlot\n+ `CREATE TABLE` expression.\n+\n+ >>> import sqlglot as sg\n+ >>> import sqlglot.expressions as sge\n+ >>> table = sg.table(\"t\", quoted=True)\n+ >>> ct = sge.Create(\n+ ... kind=\"TABLE\",\n+ ... this=sge.Schema(\n+ ... this=table,\n+ ... expressions=columns,\n+ ... ),\n+ ... )\n+ >>> ct.sql(dialect=\"duckdb\")\n+ 'CREATE TABLE \"t\" (\"a\" BIGINT, \"b\" TEXT NOT NULL)'\n+ \"\"\"\n+ import sqlglot as sg\n+ import sqlglot.expressions as sge\n+\n+ from ibis.backends.sql.datatypes import TYPE_MAPPERS as type_mappers\n+\n+ type_mapper = type_mappers[dialect]\n+ return [\n+ sge.ColumnDef(\n+ this=sg.to_identifier(name, quoted=True),\n+ kind=type_mapper.from_ibis(dtype),\n+ constraints=(\n+ None\n+ if dtype.nullable\n+ else [sge.ColumnConstraint(kind=sge.NotNullColumnConstraint())]\n+ ),\n+ )\n+ for name, dtype in self.items()\n+ ]\n+\n \n SchemaLike: TypeAlias = Union[\n Schema,\n", "util.py": "@@ -32,6 +32,7 @@ if TYPE_CHECKING:\n import ibis.expr.types as ir\n \n T = TypeVar(\"T\", covariant=True)\n+S = TypeVar(\"S\", bound=T, covariant=True)\n U = TypeVar(\"U\", covariant=True)\n K = TypeVar(\"K\")\n V = TypeVar(\"V\")\n@@ -702,3 +703,47 @@ def chunks(n: int, *, chunk_size: int) -> Iterator[tuple[int, int]]:\n [(0, 4), (4, 8), (8, 10)]\n \"\"\"\n return ((start, min(start + chunk_size, n)) for start in range(0, n, chunk_size))\n+\n+\n+def get_subclasses(obj: type[T]) -> Iterator[type[S]]:\n+ \"\"\"Recursively compute all subclasses of `obj`.\n+\n+ ::: {.callout-note}\n+ ## The resulting iterator does **not** include the input type object.\n+ :::\n+\n+ Parameters\n+ ----------\n+ obj\n+ Any type object\n+\n+ Examples\n+ --------\n+ >>> class Base: ...\n+ >>> class Subclass1(Base): ...\n+ >>> class Subclass2(Base): ...\n+ >>> class TransitiveSubclass(Subclass2): ...\n+\n+ Everything inherits `Base` (directly or transitively)\n+\n+ >>> list(get_subclasses(Base))\n+ [<class 'ibis.util.Subclass1'>, <class 'ibis.util.Subclass2'>, <class 'ibis.util.TransitiveSubclass'>]\n+\n+ Nothing inherits from `Subclass1`\n+\n+ >>> list(get_subclasses(Subclass1))\n+ []\n+\n+ Only `TransitiveSubclass` inherits from `Subclass2`\n+\n+ >>> list(get_subclasses(Subclass2))\n+ [<class 'ibis.util.TransitiveSubclass'>]\n+\n+ Nothing inherits from `TransitiveSubclass`\n+\n+ >>> list(get_subclasses(TransitiveSubclass))\n+ []\n+ \"\"\"\n+ for child_class in obj.__subclasses__():\n+ yield child_class\n+ yield from get_subclasses(child_class)\n"}
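The new `Schema.to_sqlglot` added in the record above converts an ibis schema into a list of sqlglot `ColumnDef`s, and the duckdb backend's `_generate_create_table` now builds its `CREATE TABLE` from that list instead of hand-rolling column definitions. The usage below is lifted from the docstring in the diff (dialect and table name as shown there):

```python
import ibis
import sqlglot as sg
import sqlglot.expressions as sge

# "!" marks the column as non-nullable in ibis' schema shorthand
sch = ibis.schema({"a": "int", "b": "!string"})
columns = sch.to_sqlglot(dialect="duckdb")  # list of sge.ColumnDef

ct = sge.Create(
    kind="TABLE",
    this=sge.Schema(this=sg.table("t", quoted=True), expressions=columns),
)
print(ct.sql(dialect="duckdb"))
# CREATE TABLE "t" ("a" BIGINT, "b" TEXT NOT NULL)
```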
build: updates
418b99b8f43cf88f2f999f1228c09c31990c480d
build
https://github.com/tsparticles/tsparticles/commit/418b99b8f43cf88f2f999f1228c09c31990c480d
updates
{"yarn.lock": "@@ -2882,11 +2882,6 @@ tslib@^1.8.1, tslib@^1.9.0:\n resolved \"https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00\"\n integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==\n \n-tsparticles-engine@^2.0.6:\n- version \"2.0.6\"\n- resolved \"https://registry.yarnpkg.com/tsparticles-engine/-/tsparticles-engine-2.0.6.tgz#27e85d8d014a2d9435ffc9a2c7efd570baf6dc64\"\n- integrity sha512-qsAMp68No1ncbFQbyjILrVtS9LeNTtBmxPa91COvHq4iLELReUjVxXTWfLWi0+xczTbdpNPRPx92r0V3xlqeHw==\n-\n tsutils@^3.21.0:\n version \"3.21.0\"\n resolved \"https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623\"\n", "README.md": "@@ -26,11 +26,11 @@ Once the scripts are loaded you can set up `tsParticles` and the interaction plu\n \n ```javascript\n (async () => {\n- await loadParallaxMover(tsParticles);\n+ await loadParallaxMover(tsParticles);\n \n- await tsParticles.load(\"tsparticles\", {\n- /* options */\n- });\n+ await tsParticles.load(\"tsparticles\", {\n+ /* options */\n+ });\n })();\n ```\n \n"}
refactor(exasol): port to sqlglot (#8032) This PR ports exasol to sqlglot instead of sqlalchemy.
b8bcbf2bd74c948bb40473a70a7f5a09344f54d6
refactor
https://github.com/rohankumardubey/ibis/commit/b8bcbf2bd74c948bb40473a70a7f5a09344f54d6
port to sqlglot (#8032) This PR ports exasol to sqlglot instead of sqlalchemy.
{"ibis-backends.yml": "@@ -164,13 +164,13 @@ jobs:\n # - oracle\n # services:\n # - oracle\n- # - name: exasol\n- # title: Exasol\n- # serial: true\n- # extras:\n- # - exasol\n- # services:\n- # - exasol\n+ - name: exasol\n+ title: Exasol\n+ serial: true\n+ extras:\n+ - exasol\n+ services:\n+ - exasol\n # - name: flink\n # title: Flink\n # serial: true\n@@ -301,21 +301,21 @@ jobs:\n # - flink\n # - os: windows-latest\n # backend:\n- # name: exasol\n- # title: Exasol\n- # serial: true\n- # extras:\n- # - exasol\n- # services:\n- # - exasol\n- # - os: windows-latest\n- # backend:\n # name: risingwave\n # title: Risingwave\n # services:\n # - risingwave\n # extras:\n # - risingwave\n+ - os: windows-latest\n+ backend:\n+ name: exasol\n+ title: Exasol\n+ serial: true\n+ extras:\n+ - exasol\n+ services:\n+ - exasol\n steps:\n - name: update and install system dependencies\n if: matrix.os == 'ubuntu-latest' && matrix.backend.sys-deps != null\n@@ -615,46 +615,6 @@ jobs:\n with:\n flags: backend,pyspark,${{ runner.os }},python-${{ steps.install_python.outputs.python-version }}\n \n- # gen_lockfile_sqlalchemy2:\n- # name: Generate Poetry Lockfile for SQLAlchemy 2\n- # runs-on: ubuntu-latest\n- # steps:\n- # - name: checkout\n- # uses: actions/checkout@v4\n- #\n- # - name: install python\n- # uses: actions/setup-python@v5\n- # with:\n- # python-version: \"3.11\"\n- #\n- # - run: python -m pip install --upgrade pip 'poetry==1.7.1'\n- #\n- # - name: remove deps that are not compatible with sqlalchemy 2\n- # run: poetry remove sqlalchemy-exasol\n- #\n- # - name: add sqlalchemy 2\n- # run: poetry add --lock --optional 'sqlalchemy>=2,<3'\n- #\n- # - name: checkout the lock file\n- # run: git checkout poetry.lock\n- #\n- # - name: lock with no updates\n- # # poetry add is aggressive and will update other dependencies like\n- # # numpy and pandas so we keep the pyproject.toml edits and then relock\n- # # without updating anything except the requested versions\n- # run: poetry lock --no-update\n- #\n- # - name: check the sqlalchemy version\n- # run: poetry show sqlalchemy --no-ansi | grep version | cut -d ':' -f2- | sed 's/ //g' | grep -P '^2\\.'\n- #\n- # - name: upload deps file\n- # uses: actions/upload-artifact@v3\n- # with:\n- # name: deps\n- # path: |\n- # pyproject.toml\n- # poetry.lock\n-\n # test_backends_sqlalchemy2:\n # name: SQLAlchemy 2 ${{ matrix.backend.title }} ${{ matrix.os }} python-${{ matrix.python-version }}\n # runs-on: ${{ matrix.os }}\n", "exasol.sql": "@@ -1,7 +1,7 @@\n DROP SCHEMA IF EXISTS EXASOL CASCADE;\n CREATE SCHEMA EXASOL;\n \n-CREATE OR REPLACE TABLE EXASOL.diamonds\n+CREATE OR REPLACE TABLE EXASOL.\"diamonds\"\n (\n \"carat\" DOUBLE,\n \"cut\" VARCHAR(256),\n@@ -15,13 +15,13 @@ CREATE OR REPLACE TABLE EXASOL.diamonds\n \"z\" DOUBLE\n );\n \n-CREATE OR REPLACE TABLE EXASOL.batting\n+CREATE OR REPLACE TABLE EXASOL.\"batting\"\n (\n \"playerID\" VARCHAR(256),\n \"yearID\" BIGINT,\n \"stint\" BIGINT,\n \"teamID\" VARCHAR(256),\n- \"logID\" VARCHAR(256),\n+ \"lgID\" VARCHAR(256),\n \"G\" BIGINT,\n \"AB\" BIGINT,\n \"R\" BIGINT,\n@@ -41,22 +41,22 @@ CREATE OR REPLACE TABLE EXASOL.batting\n \"GIDP\" BIGINT\n );\n \n-CREATE OR REPLACE TABLE EXASOL.awards_players\n+CREATE OR REPLACE TABLE EXASOL.\"awards_players\"\n (\n- \"playerId\" VARCHAR(256),\n+ \"playerID\" VARCHAR(256),\n \"awardID\" VARCHAR(256),\n- \"yearID\" VARCHAR(256),\n- \"logID\" VARCHAR(256),\n+ \"yearID\" BIGINT,\n+ \"lgID\" VARCHAR(256),\n \"tie\" VARCHAR(256),\n \"notest\" VARCHAR(256)\n );\n \n-CREATE OR REPLACE 
TABLE EXASOL.functional_alltypes\n+CREATE OR REPLACE TABLE EXASOL.\"functional_alltypes\"\n (\n \"id\" INTEGER,\n \"bool_col\" BOOLEAN,\n \"tinyint_col\" SHORTINT,\n- \"small_int\" SMALLINT,\n+ \"smallint_col\" SMALLINT,\n \"int_col\" INTEGER,\n \"bigint_col\" BIGINT,\n \"float_col\" FLOAT,\n@@ -69,7 +69,21 @@ CREATE OR REPLACE TABLE EXASOL.functional_alltypes\n );\n \n \n-IMPORT INTO EXASOL.diamonds FROM LOCAL CSV FILE '/data/diamonds.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n-IMPORT INTO EXASOL.batting FROM LOCAL CSV FILE '/data/batting.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n-IMPORT INTO EXASOL.awards_players FROM LOCAL CSV FILE '/data/awards_players.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n-IMPORT INTO EXASOL.functional_alltypes FROM LOCAL CSV FILE '/data/functional_alltypes.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n+IMPORT INTO EXASOL.\"diamonds\" FROM LOCAL CSV FILE '/data/diamonds.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n+IMPORT INTO EXASOL.\"batting\" FROM LOCAL CSV FILE '/data/batting.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n+IMPORT INTO EXASOL.\"awards_players\" FROM LOCAL CSV FILE '/data/awards_players.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n+IMPORT INTO EXASOL.\"functional_alltypes\" FROM LOCAL CSV FILE '/data/functional_alltypes.csv' COLUMN SEPARATOR = ',' SKIP = 1;\n+\n+CREATE OR REPLACE TABLE EXASOL.\"win\"\n+(\n+ \"g\" VARCHAR(1),\n+ \"x\" BIGINT,\n+ \"y\" BIGINT\n+);\n+\n+INSERT INTO \"win\" VALUES\n+ ('a', 0, 3),\n+ ('a', 1, 2),\n+ ('a', 2, 0),\n+ ('a', 3, 1),\n+ ('a', 4, 1);\n", "datatypes.py": "@@ -1,26 +0,0 @@\n-from __future__ import annotations\n-\n-from typing import TYPE_CHECKING\n-\n-import sqlalchemy.types as sa_types\n-\n-from ibis.backends.base.sql.alchemy.datatypes import AlchemyType\n-\n-if TYPE_CHECKING:\n- import ibis.expr.datatypes as dt\n-\n-\n-class ExasolSQLType(AlchemyType):\n- dialect = \"exa.websocket\"\n-\n- @classmethod\n- def from_ibis(cls, dtype: dt.DataType) -> sa_types.TypeEngine:\n- if dtype.is_string():\n- # see also: https://docs.exasol.com/db/latest/sql_references/data_types/datatypesoverview.htm\n- MAX_VARCHAR_SIZE = 2_000_000\n- return sa_types.VARCHAR(MAX_VARCHAR_SIZE)\n- return super().from_ibis(dtype)\n-\n- @classmethod\n- def to_ibis(cls, typ: sa_types.TypeEngine, nullable: bool = True) -> dt.DataType:\n- return super().to_ibis(typ, nullable=nullable)\n", "conftest.py": "@@ -4,6 +4,8 @@ import os\n import subprocess\n from typing import TYPE_CHECKING\n \n+import sqlglot as sg\n+\n import ibis\n from ibis.backends.tests.base import (\n ServiceBackendTest,\n@@ -40,19 +42,24 @@ class TestConf(ServiceBackendTest):\n service_name = \"exasol\"\n supports_tpch = False\n force_sort = True\n- deps = \"sqlalchemy\", \"sqlalchemy_exasol\", \"pyexasol\"\n+ deps = (\"pyexasol\",)\n \n @staticmethod\n def connect(*, tmpdir, worker_id, **kw: Any):\n- kwargs = {\n- \"user\": EXASOL_USER,\n- \"password\": EXASOL_PASS,\n- \"host\": EXASOL_HOST,\n- \"port\": EXASOL_PORT,\n- \"schema\": IBIS_TEST_EXASOL_DB,\n- \"certificate_validation\": False,\n- }\n- return ibis.exasol.connect(**kwargs)\n+ return ibis.exasol.connect(\n+ user=EXASOL_USER,\n+ password=EXASOL_PASS,\n+ host=EXASOL_HOST,\n+ port=EXASOL_PORT,\n+ **kw,\n+ )\n+\n+ def postload(self, **kw: Any):\n+ self.connection = self.connect(schema=IBIS_TEST_EXASOL_DB, **kw)\n+\n+ @staticmethod\n+ def format_table(name: str) -> str:\n+ return sg.to_identifier(name, quoted=True).sql(\"exasol\")\n \n @property\n def test_files(self) -> Iterable[Path]:\n", "__init__.py": "@@ -1,44 +1,65 @@\n from __future__ import 
annotations\n \n+import atexit\n+import contextlib\n import re\n-import warnings\n-from collections import ChainMap\n-from contextlib import contextmanager\n from typing import TYPE_CHECKING, Any\n+from urllib.parse import parse_qs, urlparse\n \n-import sqlalchemy as sa\n+import pyexasol\n import sqlglot as sg\n+import sqlglot.expressions as sge\n \n+import ibis\n+import ibis.common.exceptions as com\n+import ibis.expr.datatypes as dt\n+import ibis.expr.operations as ops\n+import ibis.expr.schema as sch\n+import ibis.expr.types as ir\n from ibis import util\n-from ibis.backends.base.sql.alchemy import AlchemyCanCreateSchema, BaseAlchemyBackend\n-from ibis.backends.base.sqlglot.datatypes import PostgresType\n+from ibis.backends.base.sqlglot import SQLGlotBackend\n+from ibis.backends.base.sqlglot.compiler import STAR, C\n from ibis.backends.exasol.compiler import ExasolCompiler\n \n if TYPE_CHECKING:\n- from collections.abc import Iterable, MutableMapping\n+ from collections.abc import Iterable, Mapping\n+\n+ import pandas as pd\n+ import pyarrow as pa\n \n from ibis.backends.base import BaseBackend\n- from ibis.expr import datatypes as dt\n+\n+# strip trailing encodings e.g., UTF8\n+_VARCHAR_REGEX = re.compile(r\"^(VARCHAR(?:\\(\\d+\\)))?(?:\\s+.+)?$\")\n \n \n-class Backend(BaseAlchemyBackend, AlchemyCanCreateSchema):\n+class Backend(SQLGlotBackend):\n name = \"exasol\"\n- compiler = ExasolCompiler\n+ compiler = ExasolCompiler()\n supports_temporary_tables = False\n supports_create_or_replace = False\n supports_in_memory_tables = False\n supports_python_udfs = False\n \n+ @property\n+ def version(self) -> str:\n+ # https://stackoverflow.com/a/67500385\n+ query = (\n+ sg.select(\"param_value\")\n+ .from_(sg.table(\"EXA_METADATA\", catalog=\"SYS\"))\n+ .where(C.param_name.eq(\"databaseProductVersion\"))\n+ )\n+ with self._safe_raw_sql(query) as result:\n+ [(version,)] = result.fetchall()\n+ return version\n+\n def do_connect(\n self,\n user: str,\n password: str,\n host: str = \"localhost\",\n port: int = 8563,\n- schema: str | None = None,\n- encryption: bool = True,\n- certificate_validation: bool = True,\n- encoding: str = \"en_US.UTF-8\",\n+ **kwargs: Any,\n ) -> None:\n \"\"\"Create an Ibis client connected to an Exasol database.\n \n@@ -52,130 +73,286 @@ class Backend(BaseAlchemyBackend, AlchemyCanCreateSchema):\n Hostname to connect to (default: \"localhost\").\n port\n Port number to connect to (default: 8563)\n- schema\n- Database schema to open, if `None`, no schema will be opened.\n- encryption\n- Enables/disables transport layer encryption (default: True).\n- certificate_validation\n- Enables/disables certificate validation (default: True).\n- encoding\n- The encoding format (default: \"en_US.UTF-8\").\n+ kwargs\n+ Additional keyword arguments passed to `pyexasol.connect`.\n \"\"\"\n- options = [\n- \"SSLCertificate=SSL_VERIFY_NONE\" if not certificate_validation else \"\",\n- f\"ENCRYPTION={'yes' if encryption else 'no'}\",\n- f\"CONNECTIONCALL={encoding}\",\n- ]\n- url_template = (\n- \"exa+websocket://{user}:{password}@{host}:{port}/{schema}?{options}\"\n- )\n- url = sa.engine.url.make_url(\n- url_template.format(\n- user=user,\n- password=password,\n- host=host,\n- port=port,\n- schema=schema,\n- options=\"&\".join(options),\n+ if kwargs.pop(\"quote_ident\", None) is not None:\n+ raise com.UnsupportedArgumentError(\n+ \"Setting `quote_ident` to anything other than `True` is not supported. 
\"\n+ \"Ibis requires all identifiers to be quoted to work correctly.\"\n )\n- )\n- engine = sa.create_engine(url, poolclass=sa.pool.StaticPool)\n- super().do_connect(engine)\n-\n- def _convert_kwargs(self, kwargs: MutableMapping) -> None:\n- def convert_sqla_to_ibis(keyword_arguments):\n- sqla_to_ibis = {\"tls\": \"encryption\", \"username\": \"user\"}\n- for sqla_kwarg, ibis_kwarg in sqla_to_ibis.items():\n- if sqla_kwarg in keyword_arguments:\n- keyword_arguments[ibis_kwarg] = keyword_arguments.pop(sqla_kwarg)\n-\n- def filter_kwargs(keyword_arguments):\n- allowed_parameters = [\n- \"user\",\n- \"password\",\n- \"host\",\n- \"port\",\n- \"schema\",\n- \"encryption\",\n- \"certificate\",\n- \"encoding\",\n- ]\n- to_be_removed = [\n- key for key in keyword_arguments if key not in allowed_parameters\n- ]\n- for parameter_name in to_be_removed:\n- del keyword_arguments[parameter_name]\n \n- convert_sqla_to_ibis(kwargs)\n- filter_kwargs(kwargs)\n+ self.con = pyexasol.connect(\n+ dsn=f\"{host}:{port}\",\n+ user=user,\n+ password=password,\n+ quote_ident=True,\n+ **kwargs,\n+ )\n+ self._temp_views = set()\n \n def _from_url(self, url: str, **kwargs) -> BaseBackend:\n \"\"\"Construct an ibis backend from a SQLAlchemy-conforming URL.\"\"\"\n- kwargs = ChainMap(kwargs)\n- _, new_kwargs = self.inspector.dialect.create_connect_args(url)\n- kwargs = kwargs.new_child(new_kwargs)\n- kwargs = dict(kwargs)\n+ url = urlparse(url)\n+ query_params = parse_qs(url.query)\n+ kwargs = {\n+ \"user\": url.username,\n+ \"password\": url.password,\n+ \"schema\": url.path[1:] or None,\n+ \"host\": url.hostname,\n+ \"port\": url.port,\n+ } | kwargs\n+\n+ for name, value in query_params.items():\n+ if len(value) > 1:\n+ kwargs[name] = value\n+ elif len(value) == 1:\n+ kwargs[name] = value[0]\n+ else:\n+ raise com.IbisError(f\"Invalid URL parameter: {name}\")\n+\n self._convert_kwargs(kwargs)\n \n return self.connect(**kwargs)\n \n- @property\n- def inspector(self):\n- with warnings.catch_warnings():\n- warnings.filterwarnings(\"ignore\", category=sa.exc.RemovedIn20Warning)\n- return super().inspector\n-\n- @contextmanager\n+ @contextlib.contextmanager\n def begin(self):\n- with warnings.catch_warnings():\n- warnings.filterwarnings(\"ignore\", category=sa.exc.RemovedIn20Warning)\n- with super().begin() as con:\n- yield con\n+ # pyexasol doesn't have a cursor method\n+ con = self.con\n+ try:\n+ yield con\n+ except Exception:\n+ con.rollback()\n+ raise\n+ else:\n+ con.commit()\n+\n+ @contextlib.contextmanager\n+ def _safe_raw_sql(self, query: str, *args, **kwargs):\n+ with contextlib.suppress(AttributeError):\n+ query = query.sql(dialect=self.compiler.dialect)\n+\n+ with self.begin() as cur:\n+ yield cur.execute(query, *args, **kwargs)\n \n def list_tables(self, like=None, database=None):\n- with warnings.catch_warnings():\n- warnings.filterwarnings(\"ignore\", category=sa.exc.RemovedIn20Warning)\n- return super().list_tables(like=like, database=database)\n+ tables = sg.select(\"table_name\").from_(\n+ sg.table(\"EXA_ALL_TABLES\", catalog=\"SYS\")\n+ )\n+ views = sg.select(sg.column(\"view_name\").as_(\"table_name\")).from_(\n+ sg.table(\"EXA_ALL_VIEWS\", catalog=\"SYS\")\n+ )\n \n- def _get_sqla_table(\n- self,\n- name: str,\n- autoload: bool = True,\n- **kwargs: Any,\n- ) -> sa.Table:\n- with warnings.catch_warnings():\n- warnings.filterwarnings(\"ignore\", category=sa.exc.RemovedIn20Warning)\n- return super()._get_sqla_table(name=name, autoload=autoload, **kwargs)\n+ if database is not None:\n+ tables = 
tables.where(sg.column(\"table_schema\").eq(sge.convert(database)))\n+ views = views.where(sg.column(\"view_schema\").eq(sge.convert(database)))\n+\n+ query = sg.union(tables, views)\n+\n+ with self._safe_raw_sql(query) as con:\n+ tables = con.fetchall()\n+\n+ return self._filter_with_like([table for (table,) in tables], like=like)\n+\n+ def get_schema(\n+ self, table_name: str, schema: str | None = None, database: str | None = None\n+ ) -> sch.Schema:\n+ name_type_pairs = self._metadata(\n+ sg.select(STAR)\n+ .from_(\n+ sg.table(\n+ table_name, db=schema, catalog=database, quoted=self.compiler.quoted\n+ )\n+ )\n+ .sql(self.compiler.dialect)\n+ )\n+ return sch.Schema.from_tuples(name_type_pairs)\n+\n+ def _fetch_from_cursor(self, cursor, schema: sch.Schema) -> pd.DataFrame:\n+ import pandas as pd\n+\n+ from ibis.backends.exasol.converter import ExasolPandasData\n+\n+ df = pd.DataFrame.from_records(cursor, columns=schema.names, coerce_float=True)\n+ df = ExasolPandasData.convert_table(df, schema)\n+ return df\n \n def _metadata(self, query: str) -> Iterable[tuple[str, dt.DataType]]:\n- table = sg.table(util.gen_name(\"exasol_metadata\"))\n+ table = sg.table(util.gen_name(\"exasol_metadata\"), quoted=self.compiler.quoted)\n+ dialect = self.compiler.dialect\n create_view = sg.exp.Create(\n- kind=\"VIEW\", this=table, expression=sg.parse_one(query, dialect=\"postgres\")\n+ kind=\"VIEW\",\n+ this=table,\n+ expression=sg.parse_one(query, dialect=dialect),\n )\n drop_view = sg.exp.Drop(kind=\"VIEW\", this=table)\n- describe = sg.exp.Describe(this=table).sql(dialect=\"postgres\")\n- # strip trailing encodings e.g., UTF8\n- varchar_regex = re.compile(r\"^(VARCHAR(?:\\(\\d+\\)))?(?:\\s+.+)?$\")\n- with self.begin() as con:\n- con.exec_driver_sql(create_view.sql(dialect=\"postgres\"))\n+ describe = sg.exp.Describe(this=table)\n+ with self._safe_raw_sql(create_view):\n try:\n yield from (\n (\n name,\n- PostgresType.from_string(varchar_regex.sub(r\"\\1\", typ)),\n+ self.compiler.type_mapper.from_string(\n+ _VARCHAR_REGEX.sub(r\"\\1\", typ)\n+ ),\n )\n- for name, typ, *_ in con.exec_driver_sql(describe)\n+ for name, typ, *_ in self.con.execute(\n+ describe.sql(dialect=dialect)\n+ ).fetchall()\n )\n finally:\n- con.exec_driver_sql(drop_view.sql(dialect=\"postgres\"))\n+ self.con.execute(drop_view.sql(dialect=dialect))\n \n- @property\n- def current_schema(self) -> str:\n- return self._scalar_query(sa.select(sa.text(\"CURRENT_SCHEMA\")))\n+ def _register_in_memory_table(self, op: ops.InMemoryTable) -> None:\n+ schema = op.schema\n+ if null_columns := [col for col, dtype in schema.items() if dtype.is_null()]:\n+ raise com.IbisTypeError(\n+ \"Exasol cannot yet reliably handle `null` typed columns; \"\n+ f\"got null typed columns: {null_columns}\"\n+ )\n+\n+ # only register if we haven't already done so\n+ if (name := op.name) not in self.list_tables():\n+ quoted = self.compiler.quoted\n+ column_defs = [\n+ sg.exp.ColumnDef(\n+ this=sg.to_identifier(colname, quoted=quoted),\n+ kind=self.compiler.type_mapper.from_ibis(typ),\n+ constraints=(\n+ None\n+ if typ.nullable\n+ else [\n+ sg.exp.ColumnConstraint(\n+ kind=sg.exp.NotNullColumnConstraint()\n+ )\n+ ]\n+ ),\n+ )\n+ for colname, typ in schema.items()\n+ ]\n+\n+ ident = sg.to_identifier(name, quoted=quoted)\n+ create_stmt = sg.exp.Create(\n+ kind=\"TABLE\",\n+ this=sg.exp.Schema(this=ident, expressions=column_defs),\n+ )\n+ create_stmt_sql = create_stmt.sql(self.name)\n+\n+ df = op.data.to_frame()\n+ with self._safe_raw_sql(create_stmt_sql):\n+ 
self.con.import_from_pandas(df, name)\n+\n+ atexit.register(self._clean_up_tmp_table, ident)\n+\n+ def _clean_up_tmp_table(self, ident: sge.Identifier) -> None:\n+ with self._safe_raw_sql(\n+ sge.Drop(kind=\"TABLE\", this=ident, force=True, cascade=True)\n+ ):\n+ pass\n+\n+ def create_table(\n+ self,\n+ name: str,\n+ obj: pd.DataFrame | pa.Table | ir.Table | None = None,\n+ *,\n+ schema: sch.Schema | None = None,\n+ database: str | None = None,\n+ overwrite: bool = False,\n+ ) -> ir.Table:\n+ \"\"\"Create a table in Snowflake.\n+\n+ Parameters\n+ ----------\n+ name\n+ Name of the table to create\n+ obj\n+ The data with which to populate the table; optional, but at least\n+ one of `obj` or `schema` must be specified\n+ schema\n+ The schema of the table to create; optional, but at least one of\n+ `obj` or `schema` must be specified\n+ database\n+ The database in which to create the table; optional\n+ overwrite\n+ If `True`, replace the table if it already exists, otherwise fail\n+ if the table exists\n+ \"\"\"\n+ if obj is None and schema is None:\n+ raise ValueError(\"Either `obj` or `schema` must be specified\")\n+\n+ if database is not None and database != self.current_database:\n+ raise com.UnsupportedOperationError(\n+ \"Creating tables in other databases is not supported by Postgres\"\n+ )\n+ else:\n+ database = None\n+\n+ quoted = self.compiler.quoted\n+\n+ if obj is not None:\n+ if not isinstance(obj, ir.Expr):\n+ table = ibis.memtable(obj)\n+ else:\n+ table = obj\n+\n+ self._run_pre_execute_hooks(table)\n+\n+ query = self._to_sqlglot(table)\n+ else:\n+ query = None\n+\n+ type_mapper = self.compiler.type_mapper\n+ column_defs = [\n+ sge.ColumnDef(\n+ this=sg.to_identifier(colname, quoted=quoted),\n+ kind=type_mapper.from_ibis(typ),\n+ constraints=(\n+ None\n+ if typ.nullable\n+ else [sge.ColumnConstraint(kind=sge.NotNullColumnConstraint())]\n+ ),\n+ )\n+ for colname, typ in (schema or table.schema()).items()\n+ ]\n+\n+ if overwrite:\n+ temp_name = util.gen_name(f\"{self.name}_table\")\n+ else:\n+ temp_name = name\n+\n+ table = sg.table(temp_name, catalog=database, quoted=quoted)\n+ target = sge.Schema(this=table, expressions=column_defs)\n+\n+ create_stmt = sge.Create(kind=\"TABLE\", this=target)\n+\n+ this = sg.table(name, catalog=database, quoted=quoted)\n+ with self._safe_raw_sql(create_stmt):\n+ if query is not None:\n+ self.con.execute(\n+ sge.Insert(this=table, expression=query).sql(self.name)\n+ )\n+\n+ if overwrite:\n+ self.con.execute(\n+ sge.Drop(kind=\"TABLE\", this=this, exists=True).sql(self.name)\n+ )\n+ self.con.execute(\n+ f\"RENAME TABLE {table.sql(self.name)} TO {this.sql(self.name)}\"\n+ )\n+\n+ if schema is None:\n+ return self.table(name, database=database)\n+\n+ # preserve the input schema if it was provided\n+ return ops.DatabaseTable(\n+ name, schema=schema, source=self, namespace=ops.Namespace(database=database)\n+ ).to_expr()\n \n @property\n- def current_database(self) -> str:\n- return None\n+ def current_schema(self) -> str:\n+ with self._safe_raw_sql(\"SELECT CURRENT_SCHEMA\") as cur:\n+ [(schema,)] = cur.fetchall()\n+ return schema\n \n def drop_schema(\n self, name: str, database: str | None = None, force: bool = False\n@@ -184,11 +361,9 @@ class Backend(BaseAlchemyBackend, AlchemyCanCreateSchema):\n raise NotImplementedError(\n \"`database` argument is not supported for the Exasol backend\"\n )\n- drop_schema = sg.exp.Drop(\n- kind=\"SCHEMA\", this=sg.to_identifier(name), exists=force\n- )\n+ drop_schema = sg.exp.Drop(kind=\"SCHEMA\", this=name, 
exists=force)\n with self.begin() as con:\n- con.exec_driver_sql(drop_schema.sql(dialect=\"postgres\"))\n+ con.execute(drop_schema.sql(dialect=self.compiler.dialect))\n \n def create_schema(\n self, name: str, database: str | None = None, force: bool = False\n@@ -197,20 +372,15 @@ class Backend(BaseAlchemyBackend, AlchemyCanCreateSchema):\n raise NotImplementedError(\n \"`database` argument is not supported for the Exasol backend\"\n )\n- create_schema = sg.exp.Create(\n- kind=\"SCHEMA\", this=sg.to_identifier(name), exists=force\n- )\n+ create_schema = sg.exp.Create(kind=\"SCHEMA\", this=name, exists=force)\n+ open_schema = self.current_schema\n with self.begin() as con:\n- open_schema = self.current_schema\n- con.exec_driver_sql(create_schema.sql(dialect=\"postgres\"))\n+ con.execute(create_schema.sql(dialect=self.compiler.dialect))\n # Exasol implicitly opens the created schema, therefore we need to restore\n # the previous context.\n- action = (\n- sa.text(f\"OPEN SCHEMA {open_schema}\")\n- if open_schema\n- else sa.text(f\"CLOSE SCHEMA {name}\")\n+ con.execute(\n+ f\"OPEN SCHEMA {open_schema}\" if open_schema else f\"CLOSE SCHEMA {name}\"\n )\n- con.exec_driver_sql(action)\n \n def list_schemas(\n self, like: str | None = None, database: str | None = None\n@@ -220,15 +390,25 @@ class Backend(BaseAlchemyBackend, AlchemyCanCreateSchema):\n \"`database` argument is not supported for the Exasol backend\"\n )\n \n- schema, table = \"SYS\", \"EXA_SCHEMAS\"\n- sch = sa.table(\n- table,\n- sa.column(\"schema_name\", sa.TEXT()),\n- schema=schema,\n- )\n+ query = sg.select(\"schema_name\").from_(sg.table(\"EXA_SCHEMAS\", catalog=\"SYS\"))\n \n- query = sa.select(sch.c.schema_name)\n+ with self._safe_raw_sql(query) as con:\n+ schemas = con.fetchall()\n+ return self._filter_with_like([schema for (schema,) in schemas], like=like)\n \n- with self.begin() as con:\n- schemas = list(con.execute(query).scalars())\n- return self._filter_with_like(schemas, like=like)\n+ def _cursor_batches(\n+ self,\n+ expr: ir.Expr,\n+ params: Mapping[ir.Scalar, Any] | None = None,\n+ limit: int | str | None = None,\n+ chunk_size: int = 1 << 20,\n+ ) -> Iterable[list]:\n+ self._run_pre_execute_hooks(expr)\n+\n+ dtypes = expr.as_table().schema().values()\n+\n+ with self._safe_raw_sql(\n+ self.compile(expr, limit=limit, params=params)\n+ ) as cursor:\n+ while batch := cursor.fetchmany(chunk_size):\n+ yield (tuple(map(dt.normalize, dtypes, row)) for row in batch)\n", "compiler.py": "@@ -1,24 +1,225 @@\n from __future__ import annotations\n \n-import sqlalchemy as sa\n+import contextlib\n+from functools import singledispatchmethod\n \n-from ibis.backends.base.sql.alchemy import AlchemyCompiler, AlchemyExprTranslator\n-from ibis.backends.exasol import registry\n-from ibis.backends.exasol.datatypes import ExasolSQLType\n+import sqlglot.expressions as sge\n+from sqlglot.dialects import Postgres\n \n+import ibis.common.exceptions as com\n+import ibis.expr.datatypes as dt\n+import ibis.expr.operations as ops\n+from ibis.backends.base.sqlglot.compiler import NULL, SQLGlotCompiler\n+from ibis.backends.base.sqlglot.datatypes import ExasolType\n+from ibis.backends.base.sqlglot.rewrites import (\n+ exclude_unsupported_window_frame_from_ops,\n+ exclude_unsupported_window_frame_from_row_number,\n+ rewrite_empty_order_by_window,\n+)\n+from ibis.common.patterns import replace\n+from ibis.expr.rewrites import p, rewrite_sample, y\n \n-class ExasolExprTranslator(AlchemyExprTranslator):\n- _registry = registry.create()\n- _rewrites = 
AlchemyExprTranslator._rewrites.copy()\n- _integer_to_timestamp = sa.func.from_unixtime\n- _dialect_name = \"exa.websocket\"\n- native_json_type = False\n- type_mapper = ExasolSQLType\n \n+def _interval(self, e):\n+ \"\"\"Work around Exasol's inability to handle string literals in INTERVAL syntax.\"\"\"\n+ arg = e.args[\"this\"].this\n+ with contextlib.suppress(AttributeError):\n+ arg = arg.sql(self.dialect)\n+ res = f\"INTERVAL '{arg}' {e.args['unit']}\"\n+ return res\n \n-rewrites = ExasolExprTranslator.rewrites\n \n+# Is postgres the best dialect to inherit from?\n+class Exasol(Postgres):\n+ \"\"\"The exasol dialect.\"\"\"\n \n-class ExasolCompiler(AlchemyCompiler):\n- translator_class = ExasolExprTranslator\n- support_values_syntax_in_select = False\n+ class Generator(Postgres.Generator):\n+ TRANSFORMS = Postgres.Generator.TRANSFORMS.copy() | {\n+ sge.Interval: _interval,\n+ }\n+\n+ TYPE_MAPPING = Postgres.Generator.TYPE_MAPPING.copy() | {\n+ sge.DataType.Type.TIMESTAMPTZ: \"TIMESTAMP WITH LOCAL TIME ZONE\",\n+ }\n+\n+\n+@replace(p.WindowFunction(p.MinRank | p.DenseRank, y @ p.WindowFrame(start=None)))\n+def exclude_unsupported_window_frame_from_rank(_, y):\n+ return ops.Subtract(\n+ _.copy(frame=y.copy(start=None, end=0, order_by=y.order_by or (ops.NULL,))), 1\n+ )\n+\n+\n+class ExasolCompiler(SQLGlotCompiler):\n+ __slots__ = ()\n+\n+ dialect = \"exasol\"\n+ type_mapper = ExasolType\n+ quoted = True\n+ rewrites = (\n+ rewrite_sample,\n+ exclude_unsupported_window_frame_from_ops,\n+ exclude_unsupported_window_frame_from_rank,\n+ exclude_unsupported_window_frame_from_row_number,\n+ rewrite_empty_order_by_window,\n+ *SQLGlotCompiler.rewrites,\n+ )\n+\n+ @staticmethod\n+ def _minimize_spec(start, end, spec):\n+ if (\n+ start is None\n+ and isinstance(getattr(end, \"value\", None), ops.Literal)\n+ and end.value.value == 0\n+ and end.following\n+ ):\n+ return None\n+ return spec\n+\n+ def _aggregate(self, funcname: str, *args, where):\n+ func = self.f[funcname]\n+ if where is not None:\n+ args = tuple(self.if_(where, arg, NULL) for arg in args)\n+ return func(*args)\n+\n+ @staticmethod\n+ def _gen_valid_name(name: str) -> str:\n+ \"\"\"Exasol does not allow dots in quoted column names.\"\"\"\n+ return name.replace(\".\", \"_\")\n+\n+ @singledispatchmethod\n+ def visit_node(self, op, **kw):\n+ return super().visit_node(op, **kw)\n+\n+ def visit_NonNullLiteral(self, op, *, value, dtype):\n+ if dtype.is_date():\n+ return self.cast(value.isoformat(), dtype)\n+ elif dtype.is_timestamp():\n+ val = value.replace(tzinfo=None).isoformat(sep=\" \", timespec=\"milliseconds\")\n+ return self.cast(val, dtype)\n+ elif dtype.is_array() or dtype.is_struct() or dtype.is_map():\n+ raise com.UnsupportedBackendType(\n+ f\"{type(dtype).__name__}s are not supported in Exasol\"\n+ )\n+ elif dtype.is_uuid():\n+ return sge.convert(str(value))\n+ return super().visit_NonNullLiteral(op, value=value, dtype=dtype)\n+\n+ @visit_node.register(ops.Date)\n+ def visit_Date(self, op, *, arg):\n+ return self.cast(arg, dt.date)\n+\n+ @visit_node.register(ops.StartsWith)\n+ def visit_StartsWith(self, op, *, arg, start):\n+ return self.f.left(arg, self.f.length(start)).eq(start)\n+\n+ @visit_node.register(ops.EndsWith)\n+ def visit_EndsWith(self, op, *, arg, end):\n+ return self.f.right(arg, self.f.length(end)).eq(end)\n+\n+ @visit_node.register(ops.StringFind)\n+ def visit_StringFind(self, op, *, arg, substr, start, end):\n+ return self.f.locate(substr, arg, (start if start is not None else 0) + 1)\n+\n+ 
@visit_node.register(ops.StringSQLILike)\n+ def visit_StringSQLILike(self, op, *, arg, pattern, escape):\n+ return self.f.upper(arg).like(self.f.upper(pattern))\n+\n+ @visit_node.register(ops.StringContains)\n+ def visit_StringContains(self, op, *, haystack, needle):\n+ return self.f.locate(needle, haystack) > 0\n+\n+ @visit_node.register(ops.ExtractSecond)\n+ def visit_ExtractSecond(self, op, *, arg):\n+ return self.f.floor(self.cast(self.f.extract(self.v.second, arg), op.dtype))\n+\n+ @visit_node.register(ops.AnalyticVectorizedUDF)\n+ @visit_node.register(ops.ApproxMedian)\n+ @visit_node.register(ops.Arbitrary)\n+ @visit_node.register(ops.ArgMax)\n+ @visit_node.register(ops.ArgMin)\n+ @visit_node.register(ops.ArrayCollect)\n+ @visit_node.register(ops.ArrayDistinct)\n+ @visit_node.register(ops.ArrayFilter)\n+ @visit_node.register(ops.ArrayFlatten)\n+ @visit_node.register(ops.ArrayIntersect)\n+ @visit_node.register(ops.ArrayMap)\n+ @visit_node.register(ops.ArraySort)\n+ @visit_node.register(ops.ArrayStringJoin)\n+ @visit_node.register(ops.ArrayUnion)\n+ @visit_node.register(ops.ArrayZip)\n+ @visit_node.register(ops.BitwiseNot)\n+ @visit_node.register(ops.Covariance)\n+ @visit_node.register(ops.CumeDist)\n+ @visit_node.register(ops.DateAdd)\n+ @visit_node.register(ops.DateDelta)\n+ @visit_node.register(ops.DateSub)\n+ @visit_node.register(ops.DateFromYMD)\n+ @visit_node.register(ops.DayOfWeekIndex)\n+ @visit_node.register(ops.DayOfWeekName)\n+ @visit_node.register(ops.ElementWiseVectorizedUDF)\n+ @visit_node.register(ops.ExtractDayOfYear)\n+ @visit_node.register(ops.ExtractEpochSeconds)\n+ @visit_node.register(ops.ExtractQuarter)\n+ @visit_node.register(ops.ExtractWeekOfYear)\n+ @visit_node.register(ops.First)\n+ @visit_node.register(ops.IntervalFromInteger)\n+ @visit_node.register(ops.IsInf)\n+ @visit_node.register(ops.IsNan)\n+ @visit_node.register(ops.Last)\n+ @visit_node.register(ops.Levenshtein)\n+ @visit_node.register(ops.Median)\n+ @visit_node.register(ops.MultiQuantile)\n+ @visit_node.register(ops.Quantile)\n+ @visit_node.register(ops.ReductionVectorizedUDF)\n+ @visit_node.register(ops.RegexExtract)\n+ @visit_node.register(ops.RegexReplace)\n+ @visit_node.register(ops.RegexSearch)\n+ @visit_node.register(ops.RegexSplit)\n+ @visit_node.register(ops.RowID)\n+ @visit_node.register(ops.StandardDev)\n+ @visit_node.register(ops.Strftime)\n+ @visit_node.register(ops.StringJoin)\n+ @visit_node.register(ops.StringSplit)\n+ @visit_node.register(ops.StringToTimestamp)\n+ @visit_node.register(ops.TimeDelta)\n+ @visit_node.register(ops.TimestampAdd)\n+ @visit_node.register(ops.TimestampBucket)\n+ @visit_node.register(ops.TimestampDelta)\n+ @visit_node.register(ops.TimestampDiff)\n+ @visit_node.register(ops.TimestampNow)\n+ @visit_node.register(ops.TimestampSub)\n+ @visit_node.register(ops.TimestampTruncate)\n+ @visit_node.register(ops.TypeOf)\n+ @visit_node.register(ops.Unnest)\n+ @visit_node.register(ops.Variance)\n+ def visit_Undefined(self, op, **_):\n+ raise com.OperationNotDefinedError(type(op).__name__)\n+\n+ @visit_node.register(ops.CountDistinctStar)\n+ def visit_Unsupported(self, op, **_):\n+ raise com.UnsupportedOperationError(type(op).__name__)\n+\n+\n+_SIMPLE_OPS = {\n+ ops.Log10: \"log10\",\n+ ops.Modulus: \"mod\",\n+ ops.All: \"min\",\n+ ops.Any: \"max\",\n+}\n+\n+for _op, _name in _SIMPLE_OPS.items():\n+ assert isinstance(type(_op), type), type(_op)\n+ if issubclass(_op, ops.Reduction):\n+\n+ @ExasolCompiler.visit_node.register(_op)\n+ def _fmt(self, op, *, _name: str = _name, 
where, **kw):\n+ return self.agg[_name](*kw.values(), where=where)\n+\n+ else:\n+\n+ @ExasolCompiler.visit_node.register(_op)\n+ def _fmt(self, op, *, _name: str = _name, **kw):\n+ return self.f[_name](*kw.values())\n+\n+ setattr(ExasolCompiler, f\"visit_{_op.__name__}\", _fmt)\n", "converter.py": "@@ -0,0 +1,38 @@\n+from __future__ import annotations\n+\n+import datetime\n+\n+from ibis.formats.pandas import PandasData\n+\n+\n+class ExasolPandasData(PandasData):\n+ @classmethod\n+ def convert_String(cls, s, dtype, pandas_type):\n+ if s.dtype != \"object\":\n+ return s.map(str)\n+ else:\n+ return s\n+\n+ @classmethod\n+ def convert_Interval(cls, s, dtype, pandas_dtype):\n+ def parse_timedelta(value):\n+ # format is '(+|-)days hour:minute:second.millisecond'\n+ days, rest = value.split(\" \", 1)\n+ hms, millis = rest.split(\".\", 1)\n+ hours, minutes, seconds = hms.split(\":\")\n+ return datetime.timedelta(\n+ days=int(days),\n+ hours=int(hours),\n+ minutes=int(minutes),\n+ seconds=int(seconds),\n+ milliseconds=int(millis),\n+ )\n+\n+ if s.dtype == \"int64\":\n+ # exasol can return intervals as the number of integer days (e.g.,\n+ # from subtraction of two dates)\n+ #\n+ # TODO: investigate whether days are the only interval ever\n+ # returned as integers\n+ return s.map(lambda days: datetime.timedelta(days=days))\n+ return s.map(parse_timedelta, na_action=\"ignore\")\n", "registry.py": "@@ -1,46 +0,0 @@\n-from __future__ import annotations\n-\n-import sqlalchemy as sa\n-\n-import ibis.expr.operations as ops\n-\n-# used for literal translate\n-from ibis.backends.base.sql.alchemy import (\n- fixed_arity,\n- sqlalchemy_operation_registry,\n-)\n-\n-\n-class _String:\n- @staticmethod\n- def find(t, op):\n- args = [t.translate(op.substr), t.translate(op.arg)]\n- if (start := op.start) is not None:\n- args.append(t.translate(start) + 1)\n- return sa.func.locate(*args) - 1\n-\n- @staticmethod\n- def translate(t, op):\n- func = fixed_arity(sa.func.translate, 3)\n- return func(t, op)\n-\n-\n-class _Registry:\n- _unsupported = {ops.StringJoin}\n-\n- _supported = {\n- ops.Translate: _String.translate,\n- ops.StringFind: _String.find,\n- }\n-\n- @classmethod\n- def create(cls):\n- registry = sqlalchemy_operation_registry.copy()\n- registry = {k: v for k, v in registry.items() if k not in cls._unsupported}\n- registry.update(cls._supported)\n- return registry\n-\n-\n-def create():\n- \"\"\"Create an operation registry for an Exasol backend.\"\"\"\n- return _Registry.create()\n", "out.sql": "@@ -0,0 +1,9 @@\n+SELECT\n+ \"t0\".\"x\" IN (\n+ SELECT\n+ \"t0\".\"x\"\n+ FROM \"t\" AS \"t0\"\n+ WHERE\n+ \"t0\".\"x\" > 2\n+ ) AS \"InSubquery(x)\"\n+FROM \"t\" AS \"t0\"\n\\ No newline at end of file\n", "test_aggregation.py": "@@ -769,7 +769,7 @@ def test_aggregate_multikey_group_reduction_udf(backend, alltypes, df):\n lambda _: slice(None),\n marks=pytest.mark.notimpl(\n [\"exasol\"],\n- raises=(com.OperationNotDefinedError, ExaQueryError, sa.exc.DBAPIError),\n+ raises=(com.OperationNotDefinedError, ExaQueryError),\n strict=False,\n ),\n id=\"no_cond\",\n@@ -849,9 +849,7 @@ def test_reduction_ops(\n raises=com.OperationNotDefinedError,\n reason=\"no one has attempted implementation yet\",\n )\[email protected](\n- [\"exasol\"], raises=(sa.exc.DBAPIError, com.UnsupportedOperationError)\n-)\[email protected]([\"exasol\"], raises=com.UnsupportedOperationError)\n def test_count_distinct_star(alltypes, df, ibis_cond, pandas_cond):\n table = alltypes[[\"int_col\", \"double_col\", \"string_col\"]]\n expr = 
table.nunique(where=ibis_cond(table))\n@@ -920,12 +918,11 @@ def test_count_distinct_star(alltypes, df, ibis_cond, pandas_cond):\n \"sqlite\",\n \"druid\",\n \"oracle\",\n- \"exasol\",\n ],\n raises=com.OperationNotDefinedError,\n ),\n pytest.mark.notyet(\n- [\"mysql\", \"impala\"], raises=com.UnsupportedBackendType\n+ [\"mysql\", \"impala\", \"exasol\"], raises=com.UnsupportedBackendType\n ),\n pytest.mark.notyet(\n [\"snowflake\"],\n@@ -1153,8 +1150,7 @@ def test_quantile(\n ),\n ],\n )\[email protected]([\"mssql\"], raises=com.OperationNotDefinedError)\[email protected]([\"exasol\"], raises=AttributeError)\[email protected]([\"mssql\", \"exasol\"], raises=com.OperationNotDefinedError)\n def test_corr_cov(\n con,\n batting,\n@@ -1597,8 +1593,9 @@ def test_grouped_case(backend, con):\n \n \n @pytest.mark.notimpl(\n- [\"datafusion\", \"mssql\", \"polars\", \"exasol\"], raises=com.OperationNotDefinedError\n+ [\"datafusion\", \"mssql\", \"polars\"], raises=com.OperationNotDefinedError\n )\[email protected]([\"exasol\"], raises=ExaQueryError)\n @pytest.mark.broken(\n [\"dask\"],\n reason=\"Dask does not windowize this operation correctly\",\n", "test_asof_join.py": "@@ -91,6 +91,7 @@ def time_keyed_right(time_keyed_df2):\n \"druid\",\n \"impala\",\n \"bigquery\",\n+ \"exasol\",\n ]\n )\n def test_asof_join(con, time_left, time_right, time_df1, time_df2, direction, op):\n@@ -127,6 +128,7 @@ def test_asof_join(con, time_left, time_right, time_df1, time_df2, direction, op\n \"druid\",\n \"impala\",\n \"bigquery\",\n+ \"exasol\",\n ]\n )\n def test_keyed_asof_join_with_tolerance(\n", "test_binary.py": "@@ -3,7 +3,6 @@ from __future__ import annotations\n import contextlib\n \n import pytest\n-import sqlalchemy.exc\n \n import ibis\n import ibis.common.exceptions as com\n@@ -29,7 +28,7 @@ BINARY_BACKEND_TYPES = {\n @pytest.mark.notimpl(\n [\"exasol\"],\n \"Exasol does not have native support for a binary data type.\",\n- raises=sqlalchemy.exc.StatementError,\n+ raises=NotImplementedError,\n )\n def test_binary_literal(con, backend):\n expr = ibis.literal(b\"A\")\n", "test_dot_sql.py": "@@ -24,6 +24,7 @@ pytestmark = [pytest.mark.xdist_group(\"dot_sql\")]\n \n _NAMES = {\n \"bigquery\": \"ibis_gbq_testing.functional_alltypes\",\n+ \"exasol\": '\"functional_alltypes\"',\n }\n \n \n@@ -38,17 +39,20 @@ _NAMES = {\n ],\n )\n def test_con_dot_sql(backend, con, schema):\n- alltypes = con.table(\"functional_alltypes\")\n+ alltypes = backend.functional_alltypes\n # pull out the quoted name\n- name = _NAMES.get(con.name, alltypes.op().name)\n+ name = _NAMES.get(con.name, \"functional_alltypes\")\n+ quoted = getattr(getattr(con, \"compiler\", None), \"quoted\", True)\n+ dialect = _IBIS_TO_SQLGLOT_DIALECT.get(con.name, con.name)\n+ cols = [\n+ sg.column(\"string_col\", quoted=quoted).as_(\"s\", quoted=quoted).sql(dialect),\n+ (sg.column(\"double_col\", quoted=quoted) + 1.0)\n+ .as_(\"new_col\", quoted=quoted)\n+ .sql(dialect),\n+ ]\n t = (\n con.sql(\n- f\"\"\"\n- SELECT\n- string_col as s,\n- double_col + 1.0 AS new_col\n- FROM {name}\n- \"\"\",\n+ f\"SELECT {', '.join(cols)} FROM {name}\",\n schema=schema,\n )\n .group_by(\"s\") # group by a column from SQL\n", "test_export.py": "@@ -13,6 +13,7 @@ from ibis import util\n from ibis.backends.tests.errors import (\n DuckDBNotImplementedException,\n DuckDBParserException,\n+ ExaQueryError,\n MySQLOperationalError,\n PyDeltaTableError,\n PyDruidProgrammingError,\n@@ -97,7 +98,6 @@ def test_empty_column_to_pyarrow(limit, awards_players):\n \n \n 
@pytest.mark.parametrize(\"limit\", no_limit)\[email protected]([\"exasol\"], raises=AttributeError)\n def test_empty_scalar_to_pyarrow(limit, awards_players):\n expr = awards_players.filter(awards_players.awardID == \"DEADBEEF\").yearID.sum()\n array = expr.to_pyarrow(limit=limit)\n@@ -105,7 +105,6 @@ def test_empty_scalar_to_pyarrow(limit, awards_players):\n \n \n @pytest.mark.parametrize(\"limit\", no_limit)\[email protected]([\"exasol\"], raises=AttributeError)\n def test_scalar_to_pyarrow_scalar(limit, awards_players):\n scalar = awards_players.yearID.sum().to_pyarrow(limit=limit)\n assert isinstance(scalar, pa.Scalar)\n@@ -209,7 +208,9 @@ def test_table_to_parquet(tmp_path, backend, awards_players):\n \n df = pd.read_parquet(outparquet)\n \n- backend.assert_frame_equal(awards_players.to_pandas(), df)\n+ backend.assert_frame_equal(\n+ awards_players.to_pandas().fillna(pd.NA), df.fillna(pd.NA)\n+ )\n \n \n @pytest.mark.notimpl(\n@@ -224,7 +225,9 @@ def test_table_to_parquet_writer_kwargs(version, tmp_path, backend, awards_playe\n \n df = pd.read_parquet(outparquet)\n \n- backend.assert_frame_equal(awards_players.to_pandas(), df)\n+ backend.assert_frame_equal(\n+ awards_players.to_pandas().fillna(pd.NA), df.fillna(pd.NA)\n+ )\n \n md = pa.parquet.read_metadata(outparquet)\n \n@@ -297,7 +300,7 @@ def test_memtable_to_file(tmp_path, con, ftype, monkeypatch):\n assert outfile.is_file()\n \n \[email protected]([\"exasol\"])\[email protected]([\"flink\"])\n def test_table_to_csv(tmp_path, backend, awards_players):\n outcsv = tmp_path / \"out.csv\"\n \n@@ -311,7 +314,7 @@ def test_table_to_csv(tmp_path, backend, awards_players):\n backend.assert_frame_equal(awards_players.to_pandas(), df)\n \n \[email protected]([\"exasol\"])\[email protected]([\"flink\"])\n @pytest.mark.notimpl(\n [\"duckdb\"],\n reason=\"cannot inline WriteOptions objects\",\n@@ -337,12 +340,12 @@ def test_table_to_csv_writer_kwargs(delimiter, tmp_path, awards_players):\n id=\"decimal128\",\n marks=[\n pytest.mark.notyet([\"flink\"], raises=NotImplementedError),\n- pytest.mark.notyet([\"exasol\"], raises=sa.exc.DBAPIError),\n pytest.mark.notyet(\n [\"risingwave\"],\n raises=sa.exc.DBAPIError,\n reason=\"Feature is not yet implemented: unsupported data type: NUMERIC(38,9)\",\n ),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n ],\n ),\n param(\n@@ -362,12 +365,13 @@ def test_table_to_csv_writer_kwargs(delimiter, tmp_path, awards_players):\n raises=(PySparkParseException, PySparkArithmeticException),\n reason=\"precision is out of range\",\n ),\n- pytest.mark.notyet([\"exasol\"], raises=sa.exc.DBAPIError),\n pytest.mark.notyet(\n [\"risingwave\"],\n raises=sa.exc.DBAPIError,\n reason=\"Feature is not yet implemented: unsupported data type: NUMERIC(76,38)\",\n ),\n+ pytest.mark.notyet([\"flink\"], raises=NotImplementedError),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n ],\n ),\n ],\n@@ -495,7 +499,6 @@ def test_to_pandas_batches_empty_table(backend, con):\n param(\n None,\n marks=[\n- pytest.mark.notimpl([\"exasol\"], raises=sa.exc.CompileError),\n pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -520,7 +523,6 @@ def test_to_pandas_batches_nonempty_table(backend, con, n):\n param(\n None,\n marks=[\n- pytest.mark.notimpl([\"exasol\"], raises=sa.exc.CompileError),\n pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n", "test_generic.py": "@@ -72,7 +72,6 @@ BOOLEAN_BACKEND_TYPE = {\n }\n \n \[email protected]([\"exasol\"])\n def 
test_boolean_literal(con, backend):\n expr = ibis.literal(False, type=dt.boolean)\n result = con.execute(expr)\n@@ -106,32 +105,34 @@ def test_scalar_fillna_nullif(con, expr, expected):\n \n \n @pytest.mark.parametrize(\n- (\"col\", \"filt\"),\n+ (\"col\", \"value\", \"filt\"),\n [\n param(\n \"nan_col\",\n- _.nan_col.isnan(),\n- marks=pytest.mark.notimpl([\"mysql\", \"sqlite\"]),\n+ ibis.literal(np.nan),\n+ methodcaller(\"isnan\"),\n+ marks=[\n+ pytest.mark.notimpl([\"mysql\", \"sqlite\", \"druid\"]),\n+ pytest.mark.notyet(\n+ [\"exasol\"],\n+ raises=ExaQueryError,\n+ reason=\"no way to test for nan-ness\",\n+ ),\n+ ],\n id=\"nan_col\",\n ),\n param(\n- \"none_col\",\n- _.none_col.isnull(),\n- marks=[pytest.mark.notimpl([\"mysql\"])],\n- id=\"none_col\",\n+ \"none_col\", ibis.NA.cast(\"float64\"), methodcaller(\"isnull\"), id=\"none_col\"\n ),\n ],\n )\[email protected]([\"mssql\", \"druid\", \"oracle\"])\[email protected]([\"mssql\", \"oracle\"])\n @pytest.mark.notyet([\"flink\"], \"NaN is not supported in Flink SQL\", raises=ValueError)\[email protected]([\"exasol\"], raises=com.OperationNotDefinedError, strict=False)\n-def test_isna(backend, alltypes, col, filt):\n- table = alltypes.select(\n- nan_col=ibis.literal(np.nan), none_col=ibis.NA.cast(\"float64\")\n- )\n+def test_isna(backend, alltypes, col, value, filt):\n+ table = alltypes.select(**{col: value})\n df = table.execute()\n \n- result = table[filt].execute().reset_index(drop=True)\n+ result = table[filt(table[col])].execute().reset_index(drop=True)\n expected = df[df[col].isna()].reset_index(drop=True)\n \n backend.assert_frame_equal(result, expected)\n@@ -569,10 +570,6 @@ def test_order_by_random(alltypes):\n raises=PyDruidProgrammingError,\n reason=\"Druid only supports trivial unions\",\n )\[email protected](\n- [\"exasol\"],\n- raises=AssertionError,\n-)\n def test_table_info(alltypes):\n expr = alltypes.info()\n df = expr.execute()\n@@ -592,18 +589,8 @@ def test_table_info(alltypes):\n @pytest.mark.parametrize(\n (\"ibis_op\", \"pandas_op\"),\n [\n- param(\n- _.string_col.isin([]),\n- lambda df: df.string_col.isin([]),\n- marks=pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError),\n- id=\"isin\",\n- ),\n- param(\n- _.string_col.notin([]),\n- lambda df: ~df.string_col.isin([]),\n- marks=pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError),\n- id=\"notin\",\n- ),\n+ param(_.string_col.isin([]), lambda df: df.string_col.isin([]), id=\"isin\"),\n+ param(_.string_col.notin([]), lambda df: ~df.string_col.isin([]), id=\"notin\"),\n param(\n (_.string_col.length() * 1).isin([1]),\n lambda df: (df.string_col.str.len() * 1).isin([1]),\n@@ -674,7 +661,6 @@ def test_isin_notin_column_expr(backend, alltypes, df, ibis_op, pandas_op):\n param(False, True, neg, id=\"false_negate\"),\n ],\n )\[email protected]([\"exasol\"])\n def test_logical_negation_literal(con, expr, expected, op):\n assert con.execute(op(ibis.literal(expr)).name(\"tmp\")) == expected\n \n@@ -827,7 +813,7 @@ def test_int_scalar(alltypes):\n assert result.dtype == np.int32\n \n \[email protected]([\"dask\", \"datafusion\", \"pandas\", \"polars\", \"druid\", \"exasol\"])\[email protected]([\"dask\", \"datafusion\", \"pandas\", \"polars\", \"druid\"])\n @pytest.mark.notyet(\n [\"clickhouse\"], reason=\"https://github.com/ClickHouse/ClickHouse/issues/6697\"\n )\n@@ -871,12 +857,12 @@ def test_typeof(con):\n @pytest.mark.notimpl([\"datafusion\", \"druid\"])\n @pytest.mark.notimpl([\"pyspark\"], condition=is_older_than(\"pyspark\", \"3.5.0\"))\n 
@pytest.mark.notyet([\"dask\", \"mssql\"], reason=\"not supported by the backend\")\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\n @pytest.mark.broken(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n reason=\"https://github.com/risingwavelabs/risingwave/issues/1343\",\n )\[email protected]([\"exasol\"], raises=ExaQueryError, reason=\"not supported by exasol\")\n def test_isin_uncorrelated(\n backend, batting, awards_players, batting_df, awards_players_df\n ):\n@@ -896,7 +882,7 @@ def test_isin_uncorrelated(\n \n \n @pytest.mark.broken([\"polars\"], reason=\"incorrect answer\")\[email protected]([\"druid\", \"exasol\"])\[email protected]([\"druid\"])\n @pytest.mark.notyet([\"dask\"], reason=\"not supported by the backend\")\n def test_isin_uncorrelated_filter(\n backend, batting, awards_players, batting_df, awards_players_df\n@@ -921,7 +907,14 @@ def test_isin_uncorrelated_filter(\n \"dtype\",\n [\n \"bool\",\n- \"bytes\",\n+ param(\n+ \"bytes\",\n+ marks=[\n+ pytest.mark.notyet(\n+ [\"exasol\"], raises=ExaQueryError, reason=\"no binary type\"\n+ )\n+ ],\n+ ),\n \"str\",\n \"int\",\n \"float\",\n@@ -933,7 +926,14 @@ def test_isin_uncorrelated_filter(\n \"float64\",\n \"timestamp\",\n \"date\",\n- \"time\",\n+ param(\n+ \"time\",\n+ marks=[\n+ pytest.mark.notyet(\n+ [\"exasol\"], raises=ExaQueryError, reason=\"no time type\"\n+ )\n+ ],\n+ ),\n ],\n )\n def test_literal_na(con, dtype):\n@@ -942,8 +942,7 @@ def test_literal_na(con, dtype):\n assert pd.isna(result)\n \n \[email protected]([\"exasol\"])\n-def test_memtable_bool_column(backend, con):\n+def test_memtable_bool_column(con):\n data = [True, False, True]\n t = ibis.memtable({\"a\": data})\n assert Counter(con.execute(t.a)) == Counter(data)\n@@ -1352,7 +1351,6 @@ def test_hexdigest(backend, alltypes):\n \"risingwave\",\n \"snowflake\",\n \"sqlite\",\n- \"exasol\",\n ]\n )\n @pytest.mark.parametrize(\n@@ -1370,6 +1368,7 @@ def test_hexdigest(backend, alltypes):\n pytest.mark.notyet([\"duckdb\", \"impala\"], reason=\"casts to NULL\"),\n pytest.mark.notyet([\"bigquery\"], raises=GoogleBadRequest),\n pytest.mark.notyet([\"trino\"], raises=TrinoUserError),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n pytest.mark.broken(\n [\"druid\"], reason=\"casts to 1672531200000 (millisecond)\"\n ),\n@@ -1512,10 +1511,6 @@ def test_try_cast_func(con, from_val, to_type, func):\n slice(None, None),\n lambda t: t.count().to_pandas(),\n marks=[\n- pytest.mark.notyet(\n- [\"exasol\"],\n- raises=sa.exc.CompileError,\n- ),\n pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -1529,10 +1524,6 @@ def test_try_cast_func(con, from_val, to_type, func):\n slice(0, None),\n lambda t: t.count().to_pandas(),\n marks=[\n- pytest.mark.notyet(\n- [\"exasol\"],\n- raises=sa.exc.CompileError,\n- ),\n pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -1563,8 +1554,13 @@ def test_try_cast_func(con, from_val, to_type, func):\n pytest.mark.never(\n [\"impala\"],\n raises=ImpalaHiveServer2Error,\n- reason=\"impala doesn't support OFFSET without ORDER BY\",\n- )\n+ reason=\"doesn't support OFFSET without ORDER BY\",\n+ ),\n+ pytest.mark.notyet(\n+ [\"exasol\"],\n+ raises=ExaQueryError,\n+ reason=\"doesn't support OFFSET without ORDER BY\",\n+ ),\n ],\n ),\n param(\n@@ -1582,10 +1578,7 @@ def test_try_cast_func(con, from_val, to_type, func):\n raises=sa.exc.CompileError,\n reason=\"mssql doesn't support OFFSET without LIMIT\",\n ),\n- pytest.mark.notyet(\n- [\"exasol\"],\n- 
raises=sa.exc.CompileError,\n- ),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n pytest.mark.never(\n [\"impala\"],\n raises=ImpalaHiveServer2Error,\n@@ -1607,8 +1600,13 @@ def test_try_cast_func(con, from_val, to_type, func):\n pytest.mark.never(\n [\"impala\"],\n raises=ImpalaHiveServer2Error,\n- reason=\"impala doesn't support OFFSET without ORDER BY\",\n- )\n+ reason=\"doesn't support OFFSET without ORDER BY\",\n+ ),\n+ pytest.mark.notyet(\n+ [\"exasol\"],\n+ raises=ExaQueryError,\n+ reason=\"doesn't support OFFSET without ORDER BY\",\n+ ),\n ],\n ),\n param(\n@@ -1621,10 +1619,7 @@ def test_try_cast_func(con, from_val, to_type, func):\n raises=sa.exc.CompileError,\n reason=\"mssql doesn't support OFFSET without LIMIT\",\n ),\n- pytest.mark.notyet(\n- [\"exasol\"],\n- raises=sa.exc.DBAPIError,\n- ),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n pytest.mark.notyet(\n [\"impala\"],\n raises=ImpalaHiveServer2Error,\n@@ -1693,10 +1688,7 @@ def test_static_table_slice(backend, slc, expected_count_fn):\n raises=sa.exc.InternalError,\n reason=\"risingwave doesn't support limit/offset\",\n )\[email protected](\n- [\"exasol\"],\n- raises=sa.exc.CompileError,\n-)\[email protected]([\"exasol\"], raises=ExaQueryError)\n @pytest.mark.notyet(\n [\"clickhouse\"],\n raises=ClickHouseDatabaseError,\n@@ -1746,7 +1738,7 @@ def test_dynamic_table_slice(backend, slc, expected_count_fn):\n raises=TrinoUserError,\n reason=\"backend doesn't support dynamic limit/offset\",\n )\[email protected]([\"exasol\"], raises=sa.exc.CompileError)\[email protected]([\"exasol\"], raises=ExaQueryError)\n @pytest.mark.notyet(\n [\"clickhouse\"],\n raises=ClickHouseDatabaseError,\n@@ -1800,7 +1792,6 @@ def test_dynamic_table_slice_with_computed_offset(backend):\n \"flink\",\n \"polars\",\n \"snowflake\",\n- \"exasol\",\n ]\n )\n @pytest.mark.notimpl(\n@@ -1829,7 +1820,6 @@ def test_sample(backend):\n \"flink\",\n \"polars\",\n \"snowflake\",\n- \"exasol\",\n ]\n )\n @pytest.mark.notimpl(\n@@ -1893,7 +1883,6 @@ def test_substitute(backend):\n [\"dask\", \"pandas\", \"polars\"], raises=NotImplementedError, reason=\"not a SQL backend\"\n )\n @pytest.mark.notimpl([\"flink\"], reason=\"no sqlglot dialect\", raises=ValueError)\[email protected]([\"exasol\"], raises=ValueError, reason=\"unknown dialect\")\n @pytest.mark.notimpl(\n [\"risingwave\"],\n raises=ValueError,\n", "test_join.py": "@@ -5,7 +5,6 @@ import sqlite3\n import numpy as np\n import pandas as pd\n import pytest\n-import sqlalchemy as sa\n from packaging.version import parse as vparse\n from pytest import param\n \n@@ -43,7 +42,14 @@ def check_eq(left, right, how, **kwargs):\n [\n \"inner\",\n \"left\",\n- \"right\",\n+ param(\n+ \"right\",\n+ marks=[\n+ pytest.mark.broken(\n+ [\"exasol\"], raises=AssertionError, reasons=\"results don't match\"\n+ )\n+ ],\n+ ),\n param(\n \"outer\",\n # TODO: mysql will likely never support full outer join\n@@ -55,12 +61,14 @@ def check_eq(left, right, how, **kwargs):\n + [\"sqlite\"] * (vparse(sqlite3.sqlite_version) < vparse(\"3.39\"))\n ),\n pytest.mark.xfail_version(datafusion=[\"datafusion<31\"]),\n+ pytest.mark.broken(\n+ [\"exasol\"], raises=AssertionError, reasons=\"results don't match\"\n+ ),\n ],\n ),\n ],\n )\n @pytest.mark.notimpl([\"druid\"])\[email protected]([\"exasol\"], raises=AttributeError)\n def test_mutating_join(backend, batting, awards_players, how):\n left = batting[batting.yearID == 2015]\n right = awards_players[awards_players.lgID == \"NL\"].drop(\"yearID\", \"lgID\")\n@@ -109,7 
+117,7 @@ def test_mutating_join(backend, batting, awards_players, how):\n \n \n @pytest.mark.parametrize(\"how\", [\"semi\", \"anti\"])\[email protected]([\"dask\", \"druid\", \"exasol\"])\[email protected]([\"dask\", \"druid\"])\n @pytest.mark.notyet([\"flink\"], reason=\"Flink doesn't support semi joins or anti joins\")\n def test_filtering_join(backend, batting, awards_players, how):\n left = batting[batting.yearID == 2015]\n@@ -139,7 +147,6 @@ def test_filtering_join(backend, batting, awards_players, how):\n backend.assert_frame_equal(result, expected, check_like=True)\n \n \[email protected]([\"exasol\"], raises=com.IbisTypeError)\n def test_join_then_filter_no_column_overlap(awards_players, batting):\n left = batting[batting.yearID == 2015]\n year = left.yearID.name(\"year\")\n@@ -152,7 +159,6 @@ def test_join_then_filter_no_column_overlap(awards_players, batting):\n assert not q.execute().empty\n \n \[email protected]([\"exasol\"], raises=com.IbisTypeError)\n def test_mutate_then_join_no_column_overlap(batting, awards_players):\n left = batting.mutate(year=batting.yearID).filter(lambda t: t.year == 2015)\n left = left[\"year\", \"RBI\"]\n@@ -175,7 +181,6 @@ def test_mutate_then_join_no_column_overlap(batting, awards_players):\n param(lambda left, right: left.join(right, \"year\", how=\"semi\"), id=\"how_semi\"),\n ],\n )\[email protected]([\"exasol\"], raises=com.IbisTypeError)\n def test_semi_join_topk(batting, awards_players, func):\n batting = batting.mutate(year=batting.yearID)\n left = func(batting, batting.year.topk(5)).select(\"year\", \"RBI\")\n@@ -198,7 +203,7 @@ def test_join_with_pandas(batting, awards_players):\n assert df.yearID.nunique() == 7\n \n \[email protected]([\"dask\", \"exasol\"])\[email protected]([\"dask\"])\n def test_join_with_pandas_non_null_typed_columns(batting, awards_players):\n batting_filt = batting[lambda t: t.yearID < 1900][[\"yearID\"]]\n awards_players_filt = awards_players[lambda t: t.yearID < 1900][\n@@ -271,10 +276,6 @@ def test_join_with_pandas_non_null_typed_columns(batting, awards_players):\n raises=TypeError,\n reason=\"dask doesn't support join predicates\",\n )\[email protected](\n- [\"exasol\"],\n- raises=com.IbisTypeError,\n-)\n def test_join_with_trivial_predicate(awards_players, predicate, how, pandas_value):\n n = 5\n \n@@ -299,9 +300,6 @@ outer_join_nullability_failures = [pytest.mark.notyet([\"sqlite\"])] * (\n )\n \n \[email protected](\n- [\"exasol\"], raises=sa.exc.NoSuchTableError, reason=\"`win` table isn't loaded\"\n-)\n @pytest.mark.notimpl([\"druid\"], raises=PyDruidProgrammingError)\n @pytest.mark.notimpl([\"flink\"], reason=\"`win` table isn't loaded\")\n @pytest.mark.parametrize(\n", "test_numeric.py": "@@ -191,10 +191,6 @@ from ibis.tests.util import assert_equal\n \"Expected np.float16 instance\",\n raises=ArrowNotImplementedError,\n ),\n- pytest.mark.notimpl(\n- [\"exasol\"],\n- raises=ExaQueryError,\n- ),\n ],\n id=\"float16\",\n ),\n@@ -212,12 +208,6 @@ from ibis.tests.util import assert_equal\n \"risingwave\": \"numeric\",\n \"flink\": \"FLOAT NOT NULL\",\n },\n- marks=[\n- pytest.mark.notimpl(\n- [\"exasol\"],\n- raises=ExaQueryError,\n- ),\n- ],\n id=\"float32\",\n ),\n param(\n@@ -234,12 +224,6 @@ from ibis.tests.util import assert_equal\n \"risingwave\": \"numeric\",\n \"flink\": \"DOUBLE NOT NULL\",\n },\n- marks=[\n- pytest.mark.notimpl(\n- [\"exasol\"],\n- raises=ExaQueryError,\n- ),\n- ],\n id=\"float64\",\n ),\n ],\n@@ -265,6 +249,7 @@ def test_numeric_literal(con, backend, expr, expected_types):\n 
\"sqlite\": 1.1,\n \"trino\": decimal.Decimal(\"1.1\"),\n \"dask\": decimal.Decimal(\"1.1\"),\n+ \"exasol\": decimal.Decimal(\"1\"),\n \"duckdb\": decimal.Decimal(\"1.1\"),\n \"risingwave\": 1.1,\n \"impala\": decimal.Decimal(\"1\"),\n@@ -281,6 +266,7 @@ def test_numeric_literal(con, backend, expr, expected_types):\n {\n \"bigquery\": \"NUMERIC\",\n \"snowflake\": \"DECIMAL\",\n+ \"exasol\": \"DECIMAL(18,0)\",\n \"sqlite\": \"real\",\n \"impala\": \"DECIMAL(9,0)\",\n \"trino\": \"decimal(18,3)\",\n@@ -290,10 +276,9 @@ def test_numeric_literal(con, backend, expr, expected_types):\n \"flink\": \"DECIMAL(38, 18) NOT NULL\",\n },\n marks=[\n- pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError),\n pytest.mark.notimpl(\n [\"clickhouse\"],\n- \"Unsupported precision. Supported values: [1 : 76]. Current value: None\",\n+ reason=\"precision must be specified; clickhouse doesn't have a default\",\n raises=NotImplementedError,\n ),\n ],\n@@ -464,6 +449,7 @@ def test_numeric_literal(con, backend, expr, expected_types):\n raises=SnowflakeProgrammingError,\n ),\n pytest.mark.notyet([\"bigquery\"], raises=GoogleBadRequest),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n ],\n id=\"decimal-infinity+\",\n ),\n@@ -540,6 +526,7 @@ def test_numeric_literal(con, backend, expr, expected_types):\n reason=\"can't cast infinity to decimal\",\n ),\n pytest.mark.notyet([\"bigquery\"], raises=GoogleBadRequest),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n ],\n id=\"decimal-infinity-\",\n ),\n@@ -628,6 +615,7 @@ def test_numeric_literal(con, backend, expr, expected_types):\n reason=\"can't cast nan to decimal\",\n ),\n pytest.mark.notyet([\"bigquery\"], raises=GoogleBadRequest),\n+ pytest.mark.notyet([\"exasol\"], raises=ExaQueryError),\n ],\n id=\"decimal-NaN\",\n ),\n@@ -767,33 +755,13 @@ def test_isnan_isinf(\n [\"datafusion\"], raises=com.OperationNotDefinedError\n ),\n ),\n- param(\n- L(5.5).round(),\n- 6.0,\n- id=\"round\",\n- ),\n- param(\n- L(5.556).round(2),\n- 5.56,\n- id=\"round-digits\",\n- ),\n+ param(L(5.5).round(), 6.0, id=\"round\"),\n+ param(L(5.556).round(2), 5.56, id=\"round-digits\"),\n param(L(5.556).ceil(), 6.0, id=\"ceil\"),\n param(L(5.556).floor(), 5.0, id=\"floor\"),\n- param(\n- L(5.556).exp(),\n- math.exp(5.556),\n- id=\"exp\",\n- ),\n- param(\n- L(5.556).sign(),\n- 1,\n- id=\"sign-pos\",\n- ),\n- param(\n- L(-5.556).sign(),\n- -1,\n- id=\"sign-neg\",\n- ),\n+ param(L(5.556).exp(), math.exp(5.556), id=\"exp\"),\n+ param(L(5.556).sign(), 1, id=\"sign-pos\"),\n+ param(L(-5.556).sign(), -1, id=\"sign-neg\"),\n param(\n L(0).sign(),\n 0,\n@@ -810,10 +778,6 @@ def test_isnan_isinf(\n math.log(5.556, 2),\n id=\"log-base\",\n marks=[\n- pytest.mark.notimpl(\n- [\"exasol\"],\n- raises=com.OperationNotDefinedError,\n- ),\n pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -827,15 +791,12 @@ def test_isnan_isinf(\n math.log(5.556),\n id=\"ln\",\n ),\n+ param(L(5.556).ln(), math.log(5.556), id=\"ln\"),\n param(\n L(5.556).log2(),\n math.log(5.556, 2),\n id=\"log2\",\n marks=[\n- pytest.mark.notimpl(\n- [\"exasol\"],\n- raises=com.OperationNotDefinedError,\n- ),\n pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -866,6 +827,10 @@ def test_isnan_isinf(\n marks=pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError),\n id=\"mod\",\n ),\n+ param(L(5.556).log10(), math.log10(5.556), id=\"log10\"),\n+ param(L(5.556).radians(), math.radians(5.556), id=\"radians\"),\n+ param(L(5.556).degrees(), math.degrees(5.556), 
id=\"degrees\"),\n+ param(L(11) % 3, 11 % 3, id=\"mod\"),\n ],\n )\n def test_math_functions_literals(con, expr, expected):\n@@ -998,7 +963,6 @@ def test_simple_math_functions_columns(\n lambda t: t.double_col.add(1).log(2),\n lambda t: np.log2(t.double_col + 1),\n marks=[\n- pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -1016,7 +980,6 @@ def test_simple_math_functions_columns(\n param(\n lambda t: t.double_col.add(1).log10(),\n lambda t: np.log10(t.double_col + 1),\n- marks=pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n id=\"log10\",\n ),\n param(\n@@ -1031,7 +994,6 @@ def test_simple_math_functions_columns(\n ),\n id=\"log_base_bigint\",\n marks=[\n- pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n pytest.mark.notimpl(\n [\"datafusion\"], raises=com.OperationNotDefinedError\n ),\n@@ -1133,11 +1095,12 @@ def test_backend_specific_numerics(backend, con, df, alltypes, expr_fn, expected\n operator.mul,\n operator.truediv,\n operator.floordiv,\n- operator.pow,\n+ param(\n+ operator.pow, marks=[pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError)]\n+ ),\n ],\n ids=lambda op: op.__name__,\n )\[email protected]([\"exasol\"], raises=AttributeError)\n def test_binary_arithmetic_operations(backend, alltypes, df, op):\n smallint_col = alltypes.smallint_col + 1 # make it nonzero\n smallint_series = df.smallint_col + 1\n@@ -1155,7 +1118,6 @@ def test_binary_arithmetic_operations(backend, alltypes, df, op):\n backend.assert_series_equal(result, expected, check_exact=False)\n \n \[email protected]([\"exasol\"], raises=AttributeError)\n def test_mod(backend, alltypes, df):\n expr = operator.mod(alltypes.smallint_col, alltypes.smallint_col + 1).name(\"tmp\")\n \n@@ -1182,7 +1144,6 @@ def test_mod(backend, alltypes, df):\n \"Cannot apply '%' to arguments of type '<DOUBLE> % <SMALLINT>'. 
Supported form(s): '<EXACT_NUMERIC> % <EXACT_NUMERIC>\",\n raises=Py4JError,\n )\[email protected]([\"exasol\"], raises=AttributeError)\n def test_floating_mod(backend, alltypes, df):\n expr = operator.mod(alltypes.double_col, alltypes.smallint_col + 1).name(\"tmp\")\n \n@@ -1339,7 +1300,7 @@ def test_floating_mod(backend, alltypes, df):\n @pytest.mark.notyet([\"mssql\"], raises=(sa.exc.OperationalError, sa.exc.DataError))\n @pytest.mark.notyet([\"snowflake\"], raises=SnowflakeProgrammingError)\n @pytest.mark.notyet([\"postgres\"], raises=PsycoPg2DivisionByZero)\[email protected]([\"exasol\"], raises=(sa.exc.DBAPIError, com.IbisTypeError))\[email protected]([\"exasol\"], raises=ExaQueryError)\n def test_divide_by_zero(backend, alltypes, df, column, denominator):\n expr = alltypes[column] / denominator\n result = expr.name(\"tmp\").execute()\n@@ -1455,13 +1416,7 @@ def test_random(con):\n [\n param(lambda x: x.clip(lower=0), lambda x: x.clip(lower=0), id=\"lower-int\"),\n param(\n- lambda x: x.clip(lower=0.0),\n- lambda x: x.clip(lower=0.0),\n- marks=pytest.mark.notimpl(\n- \"exasol\",\n- raises=ExaQueryError,\n- ),\n- id=\"lower-float\",\n+ lambda x: x.clip(lower=0.0), lambda x: x.clip(lower=0.0), id=\"lower-float\"\n ),\n param(lambda x: x.clip(upper=0), lambda x: x.clip(upper=0), id=\"upper-int\"),\n param(\n@@ -1482,10 +1437,6 @@ def test_random(con):\n param(\n lambda x: x.clip(lower=0, upper=1.0),\n lambda x: x.clip(lower=0, upper=1.0),\n- marks=pytest.mark.notimpl(\n- \"exasol\",\n- raises=ExaQueryError,\n- ),\n id=\"lower-upper-float\",\n ),\n param(\n@@ -1509,7 +1460,7 @@ def test_clip(backend, alltypes, df, ibis_func, pandas_func):\n backend.assert_series_equal(result, expected, check_names=False)\n \n \[email protected]([\"polars\", \"exasol\"], raises=com.OperationNotDefinedError)\[email protected]([\"polars\"], raises=com.OperationNotDefinedError)\n @pytest.mark.broken(\n [\"druid\"],\n raises=PyDruidProgrammingError,\n@@ -1623,9 +1574,8 @@ def test_bitwise_scalars(con, op, left, right):\n assert result == expected\n \n \[email protected]([\"datafusion\"], raises=com.OperationNotDefinedError)\[email protected]([\"datafusion\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.notimpl([\"oracle\"], raises=sa.exc.DatabaseError)\[email protected]([\"exasol\"], raises=ExaQueryError)\n @flink_no_bitwise\n def test_bitwise_not_scalar(con):\n expr = ~L(2)\n@@ -1634,9 +1584,8 @@ def test_bitwise_not_scalar(con):\n assert result == expected\n \n \[email protected]([\"datafusion\"], raises=com.OperationNotDefinedError)\[email protected]([\"datafusion\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.notimpl([\"oracle\"], raises=sa.exc.DatabaseError)\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\n @flink_no_bitwise\n def test_bitwise_not_col(backend, alltypes, df):\n expr = (~alltypes.int_col).name(\"tmp\")\n", "test_sql.py": "@@ -11,22 +11,22 @@ from ibis.backends.conftest import _get_backends_to_test\n sa = pytest.importorskip(\"sqlalchemy\")\n sg = pytest.importorskip(\"sqlglot\")\n \n-pytestmark = pytest.mark.notimpl([\"flink\", \"exasol\", \"risingwave\"])\n+pytestmark = pytest.mark.notimpl([\"flink\", \"risingwave\"])\n \n simple_literal = param(ibis.literal(1), id=\"simple_literal\")\n array_literal = param(\n ibis.array([1]),\n marks=[\n pytest.mark.never(\n- [\"mysql\", \"mssql\", \"oracle\", \"impala\", \"sqlite\"],\n- raises=exc.OperationNotDefinedError,\n+ [\"mysql\", \"mssql\", \"oracle\", \"impala\", \"sqlite\", \"exasol\"],\n+ 
raises=(exc.OperationNotDefinedError, exc.UnsupportedBackendType),\n reason=\"arrays not supported in the backend\",\n ),\n ],\n id=\"array_literal\",\n )\n no_structs = pytest.mark.never(\n- [\"impala\", \"mysql\", \"sqlite\", \"mssql\"],\n+ [\"impala\", \"mysql\", \"sqlite\", \"mssql\", \"exasol\"],\n raises=(NotImplementedError, sa.exc.CompileError, exc.UnsupportedBackendType),\n reason=\"structs not supported in the backend\",\n )\n@@ -117,7 +117,9 @@ def test_isin_bug(con, snapshot):\n raises=NotImplementedError,\n )\n @pytest.mark.notyet(\n- [\"datafusion\"], reason=\"no unnest support\", raises=exc.OperationNotDefinedError\n+ [\"datafusion\", \"exasol\"],\n+ reason=\"no unnest support\",\n+ raises=exc.OperationNotDefinedError,\n )\n @pytest.mark.notyet(\n [\"sqlite\", \"mysql\", \"druid\", \"impala\", \"mssql\"], reason=\"no unnest support upstream\"\n", "test_string.py": "@@ -962,13 +962,14 @@ def test_capitalize(con):\n [\"dask\", \"pandas\", \"polars\", \"oracle\", \"flink\"], raises=com.OperationNotDefinedError\n )\n @pytest.mark.notyet(\n- [\"mssql\", \"sqlite\", \"exasol\"],\n- reason=\"no arrays\",\n- raises=com.OperationNotDefinedError,\n+ [\"mssql\", \"sqlite\"], reason=\"no arrays\", raises=com.OperationNotDefinedError\n )\n @pytest.mark.never(\n [\"mysql\"], raises=com.OperationNotDefinedError, reason=\"no array support\"\n )\[email protected](\n+ [\"exasol\"], raises=com.UnsupportedBackendType, reason=\"no array support\"\n+)\n @pytest.mark.notimpl(\n [\"impala\"], raises=com.UnsupportedBackendType, reason=\"no array support\"\n )\n", "test_temporal.py": "@@ -47,7 +47,6 @@ from ibis.common.annotations import ValidationError\n raises=AttributeError,\n reason=\"Can only use .dt accessor with datetimelike values\",\n )\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\n def test_date_extract(backend, alltypes, df, attr, expr_fn):\n expr = getattr(expr_fn(alltypes.timestamp_col), attr)()\n expected = getattr(df.timestamp_col.dt, attr).astype(\"int32\")\n@@ -60,13 +59,9 @@ def test_date_extract(backend, alltypes, df, attr, expr_fn):\n @pytest.mark.parametrize(\n \"attr\",\n [\n- param(\n- \"year\", marks=[pytest.mark.notimpl([\"exasol\"], raises=sa.exc.DBAPIError)]\n- ),\n- param(\n- \"month\", marks=[pytest.mark.notimpl([\"exasol\"], raises=sa.exc.DBAPIError)]\n- ),\n- param(\"day\", marks=[pytest.mark.notimpl([\"exasol\"], raises=sa.exc.DBAPIError)]),\n+ \"year\",\n+ \"month\",\n+ \"day\",\n param(\n \"day_of_year\",\n marks=[\n@@ -80,24 +75,26 @@ def test_date_extract(backend, alltypes, df, attr, expr_fn):\n \"quarter\",\n marks=[\n pytest.mark.notyet([\"oracle\"], raises=sa.exc.DatabaseError),\n- pytest.mark.notimpl([\"exasol\"], raises=sa.exc.DBAPIError),\n+ pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n ],\n ),\n+ \"hour\",\n+ \"minute\",\n param(\n- \"hour\", marks=[pytest.mark.notimpl([\"exasol\"], raises=sa.exc.DBAPIError)]\n- ),\n- param(\n- \"minute\", marks=[pytest.mark.notimpl([\"exasol\"], raises=sa.exc.DBAPIError)]\n- ),\n- param(\n- \"second\", marks=[pytest.mark.notimpl([\"exasol\"], raises=sa.exc.DBAPIError)]\n+ \"second\",\n+ marks=[\n+ pytest.mark.broken(\n+ [\"exasol\"],\n+ raises=AssertionError,\n+ reason=\"seems like exasol might be rounding\",\n+ )\n+ ],\n ),\n ],\n )\[email protected]([\"druid\"], raises=com.OperationNotDefinedError)\n @pytest.mark.notimpl(\n [\"druid\"],\n- raises=AttributeError,\n+ raises=(AttributeError, com.OperationNotDefinedError),\n reason=\"AttributeError: 'StringColumn' object has no 
attribute 'X'\",\n )\n def test_timestamp_extract(backend, alltypes, df, attr):\n@@ -113,42 +110,12 @@ def test_timestamp_extract(backend, alltypes, df, attr):\n @pytest.mark.parametrize(\n (\"func\", \"expected\"),\n [\n- param(\n- methodcaller(\"year\"),\n- 2015,\n- id=\"year\",\n- marks=[pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError)],\n- ),\n- param(\n- methodcaller(\"month\"),\n- 9,\n- id=\"month\",\n- marks=[pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError)],\n- ),\n- param(\n- methodcaller(\"day\"),\n- 1,\n- id=\"day\",\n- marks=[pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError)],\n- ),\n- param(\n- methodcaller(\"hour\"),\n- 14,\n- id=\"hour\",\n- marks=[pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError)],\n- ),\n- param(\n- methodcaller(\"minute\"),\n- 48,\n- id=\"minute\",\n- marks=[pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError)],\n- ),\n- param(\n- methodcaller(\"second\"),\n- 5,\n- id=\"second\",\n- marks=[pytest.mark.notimpl([\"exasol\"], raises=ExaQueryError)],\n- ),\n+ param(methodcaller(\"year\"), 2015, id=\"year\"),\n+ param(methodcaller(\"month\"), 9, id=\"month\"),\n+ param(methodcaller(\"day\"), 1, id=\"day\"),\n+ param(methodcaller(\"hour\"), 14, id=\"hour\"),\n+ param(methodcaller(\"minute\"), 48, id=\"minute\"),\n+ param(methodcaller(\"second\"), 5, id=\"second\"),\n param(\n methodcaller(\"millisecond\"),\n 359,\n@@ -262,13 +229,12 @@ def test_timestamp_extract_epoch_seconds(backend, alltypes, df):\n backend.assert_series_equal(result, expected)\n \n \[email protected]([\"oracle\"], raises=com.OperationNotDefinedError)\[email protected]([\"oracle\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.notimpl(\n [\"druid\"],\n raises=AttributeError,\n reason=\"'StringColumn' object has no attribute 'week_of_year'\",\n )\[email protected]([\"exasol\"], raises=com.OperationNotDefinedError)\n def test_timestamp_extract_week_of_year(backend, alltypes, df):\n expr = alltypes.timestamp_col.week_of_year().name(\"tmp\")\n result = expr.execute()\n@@ -344,7 +310,7 @@ PANDAS_UNITS = {\n param(\n \"W\",\n marks=[\n- pytest.mark.broken([\"sqlite\"], raises=AssertionError),\n+ pytest.mark.broken([\"sqlite\", \"exasol\"], raises=AssertionError),\n pytest.mark.notimpl([\"mysql\"], raises=com.UnsupportedOperationError),\n pytest.mark.broken(\n [\"polars\"],\n@@ -480,7 +446,7 @@ PANDAS_UNITS = {\n reason=\"attempt to calculate the remainder with a divisor of zero\",\n ),\n pytest.mark.notimpl(\n- [\"flink\"],\n+ [\"flink\", \"exasol\"],\n raises=com.UnsupportedOperationError,\n reason=\"<IntervalUnit.NANOSECOND: 'ns'> unit is not supported in timestamp truncate\",\n ),\n@@ -488,13 +454,12 @@ PANDAS_UNITS = {\n ),\n ],\n )\[email protected]([\"oracle\"], raises=com.OperationNotDefinedError)\[email protected]([\"oracle\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.broken(\n [\"druid\"],\n raises=AttributeError,\n reason=\"AttributeError: 'StringColumn' object has no attribute 'truncate'\",\n )\[email protected]([\"exasol\"], raises=com.OperationNotDefinedError)\n def test_timestamp_truncate(backend, alltypes, df, unit):\n expr = alltypes.timestamp_col.truncate(unit).name(\"tmp\")\n \n@@ -565,6 +530,11 @@ def test_timestamp_truncate(backend, alltypes, df, unit):\n \"Timestamp truncation is not supported in Flink\"\n ),\n ),\n+ pytest.mark.broken(\n+ [\"exasol\"],\n+ raises=AssertionError,\n+ reason=\"behavior is different than expected\",\n+ ),\n ],\n ),\n ],\n@@ -581,7 +551,6 @@ def test_timestamp_truncate(backend, 
alltypes, df, unit):\n raises=AttributeError,\n reason=\"AttributeError: 'StringColumn' object has no attribute 'date'\",\n )\[email protected]([\"exasol\"], raises=com.OperationNotDefinedError)\n def test_date_truncate(backend, alltypes, df, unit):\n expr = alltypes.timestamp_col.date().truncate(unit).name(\"tmp\")\n \n@@ -848,8 +817,7 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n id=\"timestamp-add-interval\",\n marks=[\n pytest.mark.notimpl(\n- [\"sqlite\"],\n- raises=com.OperationNotDefinedError,\n+ [\"sqlite\", \"exasol\"], raises=com.OperationNotDefinedError\n ),\n pytest.mark.notimpl(\n [\"druid\"],\n@@ -872,6 +840,7 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n \"snowflake\",\n \"sqlite\",\n \"bigquery\",\n+ \"exasol\"\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -898,6 +867,7 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n \"polars\",\n \"snowflake\",\n \"bigquery\",\n+ \"exasol\"\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -921,8 +891,7 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n reason=\"unsupported operand type(s) for -: 'StringColumn' and 'IntervalScalar'\",\n ),\n pytest.mark.notimpl(\n- [\"sqlite\"],\n- raises=com.OperationNotDefinedError,\n+ [\"sqlite\", \"exasol\"], raises=com.OperationNotDefinedError\n ),\n ],\n ),\n@@ -941,6 +910,7 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n raises=AttributeError,\n reason=\"'StringColumn' object has no attribute 'date'\",\n ),\n+ pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n ],\n ),\n param(\n@@ -958,6 +928,7 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n raises=AttributeError,\n reason=\"'StringColumn' object has no attribute 'date'\",\n ),\n+ pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n ],\n ),\n param(\n@@ -998,6 +969,7 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n raises=Exception,\n reason=\"pyarrow.lib.ArrowInvalid: Casting from duration[us] to duration[s] would lose data\",\n ),\n+ pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n ],\n ),\n param(\n@@ -1040,7 +1012,6 @@ timestamp_value = pd.Timestamp(\"2018-01-01 18:18:18\")\n ],\n )\n @pytest.mark.notimpl([\"mssql\", \"oracle\"], raises=com.OperationNotDefinedError)\[email protected]([\"exasol\"], raises=com.OperationNotDefinedError)\n def test_temporal_binop(backend, con, alltypes, df, expr_fn, expected_fn):\n expr = expr_fn(alltypes, backend).name(\"tmp\")\n expected = expected_fn(df, backend)\n@@ -1284,7 +1255,6 @@ def test_temporal_binop_pandas_timedelta(\n raises=AttributeError,\n reason=\"Can only use .dt accessor with datetimelike values\",\n )\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\n def test_timestamp_comparison_filter(backend, con, alltypes, df, func_name):\n ts = pd.Timestamp(\"20100302\", tz=\"UTC\").to_pydatetime()\n \n@@ -1842,14 +1812,13 @@ DATE_BACKEND_TYPES = {\n }\n \n \[email protected]([\"pandas\", \"dask\"], raises=com.OperationNotDefinedError)\[email protected]([\"pandas\", \"dask\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.notimpl(\n [\"druid\"], raises=PyDruidProgrammingError, reason=\"SQL parse failed\"\n )\n @pytest.mark.notimpl(\n [\"oracle\"], raises=sa.exc.DatabaseError, reason=\"ORA-00936 missing expression\"\n )\[email protected]([\"exasol\"], raises=ExaQueryError)\n @pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -1880,13 +1849,13 @@ TIMESTAMP_BACKEND_TYPES = {\n \n \n 
@pytest.mark.notimpl(\n- [\"pandas\", \"dask\", \"pyspark\", \"mysql\"], raises=com.OperationNotDefinedError\n+ [\"pandas\", \"dask\", \"pyspark\", \"mysql\", \"exasol\"],\n+ raises=com.OperationNotDefinedError,\n )\n @pytest.mark.notimpl(\n [\"oracle\"], raises=sa.exc.DatabaseError, reason=\"ORA-00904: MAKE TIMESTAMP invalid\"\n )\n @pytest.mark.notyet([\"impala\"], raises=com.OperationNotDefinedError)\[email protected]([\"exasol\"], raises=ExaQueryError)\n @pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -1905,7 +1874,8 @@ def test_timestamp_literal(con, backend):\n \n \n @pytest.mark.notimpl(\n- [\"pandas\", \"mysql\", \"dask\", \"pyspark\"], raises=com.OperationNotDefinedError\n+ [\"pandas\", \"mysql\", \"dask\", \"pyspark\", \"exasol\"],\n+ raises=com.OperationNotDefinedError,\n )\n @pytest.mark.notimpl(\n [\"sqlite\"],\n@@ -1950,7 +1920,6 @@ def test_timestamp_literal(con, backend):\n \"<NUMERIC>, <NUMERIC>, <NUMERIC>, <NUMERIC>)\"\n ),\n )\[email protected]([\"exasol\"], raises=ExaQueryError)\n @pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -1980,10 +1949,11 @@ TIME_BACKEND_TYPES = {\n [\"pandas\", \"datafusion\", \"dask\", \"pyspark\", \"polars\", \"mysql\"],\n raises=com.OperationNotDefinedError,\n )\[email protected]([\"clickhouse\", \"impala\"], raises=com.OperationNotDefinedError)\[email protected](\n+ [\"clickhouse\", \"impala\", \"exasol\"], raises=com.OperationNotDefinedError\n+)\n @pytest.mark.notimpl([\"oracle\"], raises=sa.exc.DatabaseError)\n @pytest.mark.notimpl([\"druid\"], raises=com.OperationNotDefinedError)\[email protected]([\"exasol\"], raises=ExaQueryError)\n @pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -2124,7 +2094,7 @@ def test_interval_literal(con, backend):\n assert con.execute(expr.typeof()) == INTERVAL_BACKEND_TYPES[backend_name]\n \n \[email protected]([\"pandas\", \"dask\"], raises=com.OperationNotDefinedError)\[email protected]([\"pandas\", \"dask\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.broken(\n [\"druid\"],\n raises=AttributeError,\n@@ -2133,7 +2103,6 @@ def test_interval_literal(con, backend):\n @pytest.mark.broken(\n [\"oracle\"], raises=sa.exc.DatabaseError, reason=\"ORA-00936: missing expression\"\n )\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\n @pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -2150,7 +2119,8 @@ def test_date_column_from_ymd(backend, con, alltypes, df):\n \n \n @pytest.mark.notimpl(\n- [\"pandas\", \"dask\", \"pyspark\", \"mysql\"], raises=com.OperationNotDefinedError\n+ [\"pandas\", \"dask\", \"pyspark\", \"mysql\", \"exasol\"],\n+ raises=com.OperationNotDefinedError,\n )\n @pytest.mark.broken(\n [\"druid\"],\n@@ -2161,7 +2131,6 @@ def test_date_column_from_ymd(backend, con, alltypes, df):\n [\"oracle\"], raises=sa.exc.DatabaseError, reason=\"ORA-00904 make timestamp invalid\"\n )\n @pytest.mark.notyet([\"impala\"], raises=com.OperationNotDefinedError)\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\n @pytest.mark.notimpl(\n [\"risingwave\"],\n raises=sa.exc.InternalError,\n@@ -2224,16 +2193,10 @@ def test_timestamp_extract_milliseconds_with_big_value(con):\n @pytest.mark.notimpl(\n [\"datafusion\"],\n raises=Exception,\n- reason=(\n- \"This feature is not implemented: Unsupported CAST from Int32 to Timestamp(Nanosecond, None)\"\n- ),\n+ reason=\"Unsupported CAST from Int32 to Timestamp(Nanosecond, None)\",\n )\[email protected](\n- [\"oracle\"],\n- 
raises=sa.exc.DatabaseError,\n- reason=\"ORA-00932\",\n-)\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\[email protected]([\"oracle\"], raises=sa.exc.DatabaseError, reason=\"ORA-00932\")\[email protected]([\"exasol\"], raises=ExaQueryError)\n def test_integer_cast_to_timestamp_column(backend, alltypes, df):\n expr = alltypes.int_col.cast(\"timestamp\")\n expected = pd.to_datetime(df.int_col, unit=\"s\").rename(expr.get_name())\n@@ -2242,7 +2205,7 @@ def test_integer_cast_to_timestamp_column(backend, alltypes, df):\n \n \n @pytest.mark.notimpl([\"oracle\"], raises=sa.exc.DatabaseError)\[email protected]([\"exasol\"], raises=sa.exc.DBAPIError)\[email protected]([\"exasol\"], raises=ExaQueryError)\n def test_integer_cast_to_timestamp_scalar(alltypes, df):\n expr = alltypes.int_col.min().cast(\"timestamp\")\n result = expr.execute()\n@@ -2344,7 +2307,6 @@ def test_timestamp_date_comparison(backend, alltypes, df, left_fn, right_fn):\n reason=\"Casting from timestamp[s] to timestamp[ns] would result in out of bounds timestamp: 81953424000\",\n raises=ArrowInvalid,\n )\[email protected]([\"exasol\"], raises=ExaQueryError)\n def test_large_timestamp(con):\n huge_timestamp = datetime.datetime(year=4567, month=1, day=1)\n expr = ibis.timestamp(\"4567-01-01 00:00:00\")\n@@ -2377,6 +2339,7 @@ def test_large_timestamp(con):\n reason=\"time_parse truncates to milliseconds\",\n raises=AssertionError,\n ),\n+ pytest.mark.notimpl([\"exasol\"], raises=AssertionError),\n ],\n ),\n param(\n@@ -2428,6 +2391,7 @@ def test_large_timestamp(con):\n raises=sa.exc.InternalError,\n reason=\"Parse error: timestamp without time zone Can't cast string to timestamp (expected format is YYYY-MM-DD HH:MM:SS[.D+{up to 6 digits}] or YYYY-MM-DD HH:MM or YYYY-MM-DD or ISO 8601 format)\",\n ),\n+ pytest.mark.notimpl([\"exasol\"], raises=AssertionError),\n ],\n ),\n ],\n@@ -2437,7 +2401,6 @@ def test_large_timestamp(con):\n raises=sa.exc.DatabaseError,\n reason=\"ORA-01843: invalid month was specified\",\n )\[email protected]([\"exasol\"], raises=ExaQueryError)\n def test_timestamp_precision_output(con, ts, scale, unit):\n dtype = dt.Timestamp(scale=scale)\n expr = ibis.literal(ts).cast(dtype)\n@@ -2576,10 +2539,7 @@ def test_delta(con, start, end, unit, expected):\n {\"seconds\": 2},\n \"2s\",\n marks=[\n- pytest.mark.notimpl(\n- [\"datafusion\"],\n- raises=com.OperationNotDefinedError,\n- ),\n+ pytest.mark.notimpl([\"datafusion\"], raises=com.OperationNotDefinedError)\n ],\n id=\"seconds\",\n ),\n@@ -2587,10 +2547,7 @@ def test_delta(con, start, end, unit, expected):\n {\"minutes\": 5},\n \"300s\",\n marks=[\n- pytest.mark.notimpl(\n- [\"datafusion\"],\n- raises=com.OperationNotDefinedError,\n- ),\n+ pytest.mark.notimpl([\"datafusion\"], raises=com.OperationNotDefinedError)\n ],\n id=\"minutes\",\n ),\n@@ -2598,10 +2555,7 @@ def test_delta(con, start, end, unit, expected):\n {\"hours\": 2},\n \"2h\",\n marks=[\n- pytest.mark.notimpl(\n- [\"datafusion\"],\n- raises=com.OperationNotDefinedError,\n- ),\n+ pytest.mark.notimpl([\"datafusion\"], raises=com.OperationNotDefinedError)\n ],\n id=\"hours\",\n ),\n", "test_window.py": "@@ -14,6 +14,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n from ibis.backends.tests.errors import (\n ClickHouseDatabaseError,\n+ ExaQueryError,\n GoogleBadRequest,\n ImpalaHiveServer2Error,\n MySQLOperationalError,\n@@ -24,17 +25,9 @@ from ibis.backends.tests.errors import (\n from ibis.legacy.udf.vectorized import analytic, reduction\n \n pytestmark = [\n- 
pytest.mark.notimpl(\n- [\"exasol\"],\n- raises=(\n- sa.exc.ProgrammingError,\n- sa.exc.NoSuchTableError,\n- com.OperationNotDefinedError,\n- ),\n- ),\n pytest.mark.notimpl(\n [\"druid\"], raises=(com.OperationNotDefinedError, PyDruidProgrammingError)\n- ),\n+ )\n ]\n \n \n@@ -163,7 +156,9 @@ def calc_zscore(s):\n lambda t: t.id.rank(method=\"min\") / t.id.transform(len),\n id=\"cume_dist\",\n marks=[\n- pytest.mark.notyet([\"clickhouse\"], raises=com.OperationNotDefinedError),\n+ pytest.mark.notyet(\n+ [\"clickhouse\", \"exasol\"], raises=com.OperationNotDefinedError\n+ ),\n pytest.mark.notimpl([\"dask\"], raises=NotImplementedError),\n pytest.mark.notimpl([\"dask\"], raises=NotImplementedError),\n pytest.mark.notimpl(\n@@ -208,13 +203,19 @@ def calc_zscore(s):\n lambda t, win: t.float_col.first().over(win),\n lambda t: t.float_col.transform(\"first\"),\n id=\"first\",\n- marks=pytest.mark.notimpl([\"dask\"], raises=NotImplementedError),\n+ marks=[\n+ pytest.mark.notimpl([\"dask\"], raises=NotImplementedError),\n+ pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n+ ],\n ),\n param(\n lambda t, win: t.float_col.last().over(win),\n lambda t: t.float_col.transform(\"last\"),\n id=\"last\",\n- marks=pytest.mark.notimpl([\"dask\"], raises=NotImplementedError),\n+ marks=[\n+ pytest.mark.notimpl([\"dask\"], raises=NotImplementedError),\n+ pytest.mark.notimpl([\"exasol\"], raises=com.OperationNotDefinedError),\n+ ],\n ),\n param(\n lambda t, win: t.double_col.nth(3).over(win),\n@@ -430,6 +431,7 @@ def test_grouped_bounded_expanding_window(\n \"snowflake\",\n \"datafusion\",\n \"trino\",\n+ \"exasol\",\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -589,6 +591,7 @@ def test_grouped_bounded_preceding_window(backend, alltypes, df, window_fn):\n \"snowflake\",\n \"trino\",\n \"datafusion\",\n+ \"exasol\",\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -786,6 +789,7 @@ def test_simple_ungrouped_window_with_scalar_order_by(alltypes):\n \"snowflake\",\n \"trino\",\n \"datafusion\",\n+ \"exasol\",\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -817,6 +821,7 @@ def test_simple_ungrouped_window_with_scalar_order_by(alltypes):\n \"snowflake\",\n \"trino\",\n \"datafusion\",\n+ \"exasol\",\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -942,6 +947,7 @@ def test_simple_ungrouped_window_with_scalar_order_by(alltypes):\n \"snowflake\",\n \"trino\",\n \"datafusion\",\n+ \"exasol\",\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -974,6 +980,7 @@ def test_simple_ungrouped_window_with_scalar_order_by(alltypes):\n \"snowflake\",\n \"trino\",\n \"datafusion\",\n+ \"exasol\",\n ],\n raises=com.OperationNotDefinedError,\n ),\n@@ -1165,7 +1172,7 @@ def test_mutate_window_filter(backend, alltypes):\n backend.assert_frame_equal(res, sol, check_dtype=False)\n \n \[email protected]([\"polars\"], raises=com.OperationNotDefinedError)\[email protected]([\"polars\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.notimpl(\n [\"flink\"],\n raises=Exception,\n@@ -1227,6 +1234,11 @@ def test_first_last(backend):\n raises=sa.exc.InternalError,\n reason=\"sql parser error: Expected literal int, found: INTERVAL at line:1, column:99\",\n )\[email protected](\n+ [\"exasol\"],\n+ raises=ExaQueryError,\n+ reason=\"database can't handle UTC timestamps in DataFrames\",\n+)\n def test_range_expression_bounds(backend):\n t = ibis.memtable(\n {\n", "pyproject.toml": "@@ -77,6 +77,7 @@ polars = { version = \">=0.19.3,<1\", optional = true }\n psycopg2 = { version = 
\">=2.8.4,<3\", optional = true }\n pydata-google-auth = { version = \">=1.4.0,<2\", optional = true }\n pydruid = { version = \">=0.6.5,<1\", optional = true }\n+pyexasol = { version = \">=0.25.2,<1\", optional = true, extras = [\"pandas\"] }\n pymysql = { version = \">=1,<2\", optional = true }\n pyodbc = { version = \">=4.0.39,<6\", optional = true }\n pyspark = { version = \">=3,<4\", optional = true }\n@@ -87,7 +88,6 @@ shapely = { version = \">=2,<3\", optional = true }\n # issues with versions <3.0.2\n snowflake-connector-python = { version = \">=3.0.2,<4,!=3.3.0b1\", optional = true }\n sqlalchemy = { version = \">=1.4,<3\", optional = true }\n-sqlalchemy-exasol = { version = \">=4.6.0\", optional = true }\n sqlalchemy-views = { version = \">=0.3.1,<1\", optional = true }\n sqlalchemy-risingwave = { version = \">=1.0.0,<2\", optional = true }\n trino = { version = \">=0.321,<1\", optional = true }\n@@ -162,6 +162,7 @@ all = [\n \"psycopg2\",\n \"pydata-google-auth\",\n \"pydruid\",\n+ \"pyexasol\",\n \"pymysql\",\n \"pyodbc\",\n \"pyspark\",\n@@ -169,7 +170,6 @@ all = [\n \"shapely\",\n \"snowflake-connector-python\",\n \"sqlalchemy\",\n- \"sqlalchemy-exasol\",\n \"sqlalchemy-views\",\n \"sqlalchemy-risingwave\",\n \"trino\",\n@@ -185,7 +185,7 @@ dask = [\"dask\", \"regex\"]\n datafusion = [\"datafusion\"]\n druid = [\"pydruid\"]\n duckdb = [\"duckdb\"]\n-exasol = [\"sqlalchemy\", \"sqlalchemy-exasol\", \"sqlalchemy-views\"]\n+exasol = [\"pyexasol\"]\n flink = []\n geospatial = [\"geopandas\", \"shapely\"]\n impala = [\"impyla\"]\n"}
refactor(sql): make compilers usable with a base install (#9766)
84a786d236912839a4be4fb05b6e2e8097bb01a3
refactor
https://github.com/rohankumardubey/ibis/commit/84a786d236912839a4be4fb05b6e2e8097bb01a3
make compilers usable with a base install (#9766)
{"__init__.py": "@@ -14,13 +14,13 @@ import sqlglot.expressions as sge\n import trino\n \n import ibis\n+import ibis.backends.sql.compilers as sc\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n from ibis import util\n from ibis.backends import CanCreateDatabase, CanCreateSchema, CanListCatalog\n from ibis.backends.sql import SQLBackend\n-from ibis.backends.sql.compilers import TrinoCompiler\n from ibis.backends.sql.compilers.base import C\n \n if TYPE_CHECKING:\n@@ -36,7 +36,7 @@ if TYPE_CHECKING:\n \n class Backend(SQLBackend, CanListCatalog, CanCreateDatabase, CanCreateSchema):\n name = \"trino\"\n- compiler = TrinoCompiler()\n+ compiler = sc.trino.compiler\n supports_create_or_replace = False\n supports_temporary_tables = False\n \n@@ -490,7 +490,7 @@ class Backend(SQLBackend, CanListCatalog, CanCreateDatabase, CanCreateSchema):\n )\n for name, typ in (schema or table.schema()).items()\n )\n- ).from_(self._to_sqlglot(table).subquery())\n+ ).from_(self.compiler.to_sqlglot(table).subquery())\n else:\n select = None\n \n", "out.sql": "@@ -1,5 +0,0 @@\n-SELECT\n- t0.`title`\n-FROM `bigquery-public-data`.stackoverflow.posts_questions AS t0\n-INNER JOIN `nyc-tlc`.yellow.trips AS t1\n- ON t0.`tags` = t1.`rate_code`\n\\ No newline at end of file\n", "test_client.py": "@@ -199,11 +199,9 @@ def test_parted_column(con, kind):\n assert t.columns == [expected_column, \"string_col\", \"int_col\"]\n \n \n-def test_cross_project_query(public, snapshot):\n+def test_cross_project_query(public):\n table = public.table(\"posts_questions\")\n expr = table[table.tags.contains(\"ibis\")][[\"title\", \"tags\"]]\n- result = expr.compile()\n- snapshot.assert_match(result, \"out.sql\")\n n = 5\n df = expr.limit(n).execute()\n assert len(df) == n\n@@ -226,17 +224,6 @@ def test_exists_table_different_project(con):\n assert \"foobar\" not in con.list_tables(database=dataset)\n \n \n-def test_multiple_project_queries(con, snapshot):\n- so = con.table(\n- \"posts_questions\",\n- database=(\"bigquery-public-data\", \"stackoverflow\"),\n- )\n- trips = con.table(\"trips\", database=\"nyc-tlc.yellow\")\n- join = so.join(trips, so.tags == trips.rate_code)[[so.title]]\n- result = join.compile()\n- snapshot.assert_match(result, \"out.sql\")\n-\n-\n def test_multiple_project_queries_execute(con):\n posts_questions = con.table(\n \"posts_questions\", database=\"bigquery-public-data.stackoverflow\"\n", "test_core.py": "@@ -6,7 +6,10 @@ import tempfile\n \n import pytest\n \n-from ibis.backends.bigquery.udf.core import PythonToJavaScriptTranslator, SymbolTable\n+from ibis.backends.sql.compilers.bigquery.udf.core import (\n+ PythonToJavaScriptTranslator,\n+ SymbolTable,\n+)\n \n \n def test_symbol_table():\n", "test_find.py": "@@ -2,7 +2,7 @@ from __future__ import annotations\n \n import ast\n \n-from ibis.backends.bigquery.udf.find import find_names\n+from ibis.backends.sql.compilers.bigquery.udf.find import find_names\n from ibis.util import is_iterable\n \n \n", "base.py": "@@ -496,6 +496,24 @@ class SQLGlotCompiler(abc.ABC):\n def type_mapper(self) -> type[SqlglotType]:\n \"\"\"The type mapper for the backend.\"\"\"\n \n+ def _compile_builtin_udf(self, udf_node: ops.ScalarUDF) -> None: # noqa: B027\n+ \"\"\"No-op.\"\"\"\n+\n+ def _compile_python_udf(self, udf_node: ops.ScalarUDF) -> None:\n+ raise NotImplementedError(\n+ f\"Python UDFs are not supported in the {self.dialect} backend\"\n+ )\n+\n+ def _compile_pyarrow_udf(self, udf_node: ops.ScalarUDF) -> None:\n+ raise 
NotImplementedError(\n+ f\"PyArrow UDFs are not supported in the {self.dialect} backend\"\n+ )\n+\n+ def _compile_pandas_udf(self, udf_node: ops.ScalarUDF) -> str:\n+ raise NotImplementedError(\n+ f\"pandas UDFs are not supported in the {self.dialect} backend\"\n+ )\n+\n # Concrete API\n \n def if_(self, condition, true, false: sge.Expression | None = None) -> sge.If:\n@@ -517,6 +535,34 @@ class SQLGlotCompiler(abc.ABC):\n result[node] = value\n return result\n \n+ def to_sqlglot(\n+ self,\n+ expr: ir.Expr,\n+ *,\n+ limit: str | None = None,\n+ params: Mapping[ir.Expr, Any] | None = None,\n+ ):\n+ import ibis\n+\n+ table_expr = expr.as_table()\n+\n+ if limit == \"default\":\n+ limit = ibis.options.sql.default_limit\n+ if limit is not None:\n+ table_expr = table_expr.limit(limit)\n+\n+ if params is None:\n+ params = {}\n+\n+ sql = self.translate(table_expr.op(), params=params)\n+ assert not isinstance(sql, sge.Subquery)\n+\n+ if isinstance(sql, sge.Table):\n+ sql = sg.select(STAR, copy=False).from_(sql, copy=False)\n+\n+ assert not isinstance(sql, sge.Subquery)\n+ return sql\n+\n def translate(self, op, *, params: Mapping[ir.Value, Any]) -> sge.Expression:\n \"\"\"Translate an ibis operation to a sqlglot expression.\n \n", "core.py": "@@ -10,8 +10,8 @@ import textwrap\n from collections import ChainMap\n from typing import TYPE_CHECKING\n \n-from ibis.backends.bigquery.udf.find import find_names\n-from ibis.backends.bigquery.udf.rewrite import rewrite\n+from ibis.backends.sql.compilers.bigquery.udf.find import find_names\n+from ibis.backends.sql.compilers.bigquery.udf.rewrite import rewrite\n \n if TYPE_CHECKING:\n from collections.abc import Callable\n", "find.py": "", "rewrite.py": "", "clickhouse.py": "@@ -795,3 +795,6 @@ class ClickHouseCompiler(SQLGlotCompiler):\n return self.if_(\n sg.or_(arg.is_(NULL), key.is_(NULL)), NULL, self.f.mapContains(arg, key)\n )\n+\n+\n+compiler = ClickHouseCompiler()\n", "datafusion.py": "@@ -497,3 +497,6 @@ class DataFusionCompiler(SQLGlotCompiler):\n return super().visit_GroupConcat(\n op, arg=arg, sep=sep, where=where, order_by=order_by\n )\n+\n+\n+compiler = DataFusionCompiler()\n", "druid.py": "@@ -199,3 +199,6 @@ class DruidCompiler(SQLGlotCompiler):\n \"Z\",\n )\n )\n+\n+\n+compiler = DruidCompiler()\n", "duckdb.py": "@@ -2,6 +2,7 @@ from __future__ import annotations\n \n import math\n from functools import partial, reduce\n+from typing import TYPE_CHECKING, Any\n \n import sqlglot as sg\n import sqlglot.expressions as sge\n@@ -16,6 +17,12 @@ from ibis.backends.sql.datatypes import DuckDBType\n from ibis.backends.sql.rewrites import exclude_nulls_from_array_collect\n from ibis.util import gen_name\n \n+if TYPE_CHECKING:\n+ from collections.abc import Mapping\n+\n+ import ibis.expr.types as ir\n+\n+\n _INTERVAL_SUFFIXES = {\n \"ms\": \"milliseconds\",\n \"us\": \"microseconds\",\n@@ -98,6 +105,33 @@ class DuckDBCompiler(SQLGlotCompiler):\n ops.GeoY: \"st_y\",\n }\n \n+ def to_sqlglot(\n+ self,\n+ expr: ir.Expr,\n+ *,\n+ limit: str | None = None,\n+ params: Mapping[ir.Expr, Any] | None = None,\n+ ):\n+ sql = super().to_sqlglot(expr, limit=limit, params=params)\n+\n+ table_expr = expr.as_table()\n+ geocols = table_expr.schema().geospatial\n+\n+ if not geocols:\n+ return sql\n+\n+ quoted = self.quoted\n+ return sg.select(\n+ sge.Star(\n+ replace=[\n+ self.f.st_aswkb(sg.column(col, quoted=quoted)).as_(\n+ col, quoted=quoted\n+ )\n+ for col in geocols\n+ ]\n+ )\n+ ).from_(sql.subquery())\n+\n def visit_StructColumn(self, op, *, names, values):\n return 
sge.Struct.from_arg_list(\n [\n@@ -614,3 +648,6 @@ class DuckDBCompiler(SQLGlotCompiler):\n .from_(parent)\n .join(unnest, join_type=\"CROSS\" if not keep_empty else \"LEFT\")\n )\n+\n+\n+compiler = DuckDBCompiler()\n", "exasol.py": "@@ -250,3 +250,6 @@ class ExasolCompiler(SQLGlotCompiler):\n \n def visit_BitwiseXor(self, op, *, left, right):\n return self.cast(self.f.bit_xor(left, right), op.dtype)\n+\n+\n+compiler = ExasolCompiler()\n", "flink.py": "@@ -563,3 +563,6 @@ class FlinkCompiler(SQLGlotCompiler):\n \n def visit_StructColumn(self, op, *, names, values):\n return self.cast(sge.Struct(expressions=list(values)), op.dtype)\n+\n+\n+compiler = FlinkCompiler()\n", "impala.py": "@@ -320,3 +320,6 @@ class ImpalaCompiler(SQLGlotCompiler):\n if not dtype.is_float32():\n return self.cast(sign, dtype)\n return sign\n+\n+\n+compiler = ImpalaCompiler()\n", "mssql.py": "@@ -1,6 +1,7 @@\n from __future__ import annotations\n \n import calendar\n+from typing import TYPE_CHECKING, Any\n \n import sqlglot as sg\n import sqlglot.expressions as sge\n@@ -26,6 +27,11 @@ from ibis.backends.sql.rewrites import (\n )\n from ibis.common.deferred import var\n \n+if TYPE_CHECKING:\n+ from collections.abc import Mapping\n+\n+ import ibis.expr.operations as ir\n+\n y = var(\"y\")\n start = var(\"start\")\n end = var(\"end\")\n@@ -133,17 +139,9 @@ class MSSQLCompiler(SQLGlotCompiler):\n ops.Max: \"max\",\n }\n \n- @property\n- def NAN(self):\n- return self.f.double(\"NaN\")\n-\n- @property\n- def POS_INF(self):\n- return self.f.double(\"Infinity\")\n-\n- @property\n- def NEG_INF(self):\n- return self.f.double(\"-Infinity\")\n+ NAN = sg.func(\"double\", sge.convert(\"NaN\"))\n+ POS_INF = sg.func(\"double\", sge.convert(\"Infinity\"))\n+ NEG_INF = sg.func(\"double\", sge.convert(\"-Infinity\"))\n \n @staticmethod\n def _generate_groups(groups):\n@@ -160,7 +158,28 @@ class MSSQLCompiler(SQLGlotCompiler):\n return None\n return spec\n \n- def visit_RandomUUID(self, op, **kwargs):\n+ def to_sqlglot(\n+ self,\n+ expr: ir.Expr,\n+ *,\n+ limit: str | None = None,\n+ params: Mapping[ir.Expr, Any] | None = None,\n+ ):\n+ \"\"\"Compile an Ibis expression to a sqlglot object.\"\"\"\n+ import ibis\n+\n+ table_expr = expr.as_table()\n+ conversions = {\n+ name: ibis.ifelse(table_expr[name], 1, 0).cast(dt.boolean)\n+ for name, typ in table_expr.schema().items()\n+ if typ.is_boolean()\n+ }\n+\n+ if conversions:\n+ table_expr = table_expr.mutate(**conversions)\n+ return super().to_sqlglot(table_expr, limit=limit, params=params)\n+\n+ def visit_RandomUUID(self, op, **_):\n return self.f.newid()\n \n def visit_StringLength(self, op, *, arg):\n@@ -480,3 +499,6 @@ class MSSQLCompiler(SQLGlotCompiler):\n result = result.order_by(*sort_keys, copy=False)\n \n return result\n+\n+\n+compiler = MSSQLCompiler()\n", "mysql.py": "@@ -377,3 +377,6 @@ class MySQLCompiler(SQLGlotCompiler):\n self.if_(arg.eq(sge.convert(\"true\")), 1, 0),\n NULL,\n )\n+\n+\n+compiler = MySQLCompiler()\n", "oracle.py": "@@ -459,3 +459,6 @@ class OracleCompiler(SQLGlotCompiler):\n out = sge.WithinGroup(this=out, expression=sge.Order(expressions=order_by))\n \n return out\n+\n+\n+compiler = OracleCompiler()\n", "postgres.py": "@@ -1,7 +1,11 @@\n from __future__ import annotations\n \n+import inspect\n import string\n+import textwrap\n from functools import partial, reduce\n+from itertools import takewhile\n+from typing import TYPE_CHECKING, Any\n \n import sqlglot as sg\n import sqlglot.expressions as sge\n@@ -14,8 +18,20 @@ from ibis.backends.sql.compilers.base 
import NULL, STAR, AggGen, SQLGlotCompiler\n from ibis.backends.sql.datatypes import PostgresType\n from ibis.backends.sql.dialects import Postgres\n from ibis.backends.sql.rewrites import exclude_nulls_from_array_collect\n+from ibis.common.exceptions import InvalidDecoratorError\n from ibis.util import gen_name\n \n+if TYPE_CHECKING:\n+ from collections.abc import Mapping\n+\n+ import ibis.expr.types as ir\n+\n+\n+def _verify_source_line(func_name: str, line: str):\n+ if line.startswith(\"@\"):\n+ raise InvalidDecoratorError(func_name, line)\n+ return line\n+\n \n class PostgresUDFNode(ops.Value):\n shape = rlz.shape_like(\"args\")\n@@ -99,6 +115,64 @@ class PostgresCompiler(SQLGlotCompiler):\n ops.TimeFromHMS: \"make_time\",\n }\n \n+ def to_sqlglot(\n+ self,\n+ expr: ir.Expr,\n+ *,\n+ limit: str | None = None,\n+ params: Mapping[ir.Expr, Any] | None = None,\n+ ):\n+ table_expr = expr.as_table()\n+ geocols = table_expr.schema().geospatial\n+ conversions = {name: table_expr[name].as_ewkb() for name in geocols}\n+\n+ if conversions:\n+ table_expr = table_expr.mutate(**conversions)\n+ return super().to_sqlglot(table_expr, limit=limit, params=params)\n+\n+ def _compile_python_udf(self, udf_node: ops.ScalarUDF):\n+ config = udf_node.__config__\n+ func = udf_node.__func__\n+ func_name = func.__name__\n+\n+ lines, _ = inspect.getsourcelines(func)\n+ iter_lines = iter(lines)\n+\n+ function_premable_lines = list(\n+ takewhile(lambda line: not line.lstrip().startswith(\"def \"), iter_lines)\n+ )\n+\n+ if len(function_premable_lines) > 1:\n+ raise InvalidDecoratorError(\n+ name=func_name, lines=\"\".join(function_premable_lines)\n+ )\n+\n+ source = textwrap.dedent(\n+ \"\".join(map(partial(_verify_source_line, func_name), iter_lines))\n+ ).strip()\n+\n+ type_mapper = self.type_mapper\n+ argnames = udf_node.argnames\n+ return \"\"\"\\\n+ CREATE OR REPLACE FUNCTION {ident}({signature})\n+ RETURNS {return_type}\n+ LANGUAGE {language}\n+ AS $$\n+ {source}\n+ return {name}({args})\n+ $$\"\"\".format(\n+ name=type(udf_node).__name__,\n+ ident=self.__sql_name__(udf_node),\n+ signature=\", \".join(\n+ f\"{argname} {type_mapper.to_string(arg.dtype)}\"\n+ for argname, arg in zip(argnames, udf_node.args)\n+ ),\n+ return_type=type_mapper.to_string(udf_node.dtype),\n+ language=config.get(\"language\", \"plpython3u\"),\n+ source=source,\n+ args=\", \".join(argnames),\n+ )\n+\n def visit_RandomUUID(self, op, **kwargs):\n return self.f.gen_random_uuid()\n \n@@ -699,3 +773,6 @@ class PostgresCompiler(SQLGlotCompiler):\n \n def visit_ArrayAll(self, op, *, arg):\n return self._array_reduction(arg=arg, reduction=\"bool_and\")\n+\n+\n+compiler = PostgresCompiler()\n", "pyspark.py": "@@ -634,3 +634,6 @@ class PySparkCompiler(SQLGlotCompiler):\n \n def visit_ArrayMean(self, op, *, arg):\n return self._array_reduction(dtype=op.dtype, arg=arg, output=operator.truediv)\n+\n+\n+compiler = PySparkCompiler()\n", "risingwave.py": "@@ -95,3 +95,6 @@ class RisingWaveCompiler(PostgresCompiler):\n elif dtype.is_json():\n return sge.convert(str(value))\n return None\n+\n+\n+compiler = RisingWaveCompiler()\n", "snowflake.py": "@@ -1,6 +1,10 @@\n from __future__ import annotations\n \n+import inspect\n import itertools\n+import platform\n+import sys\n+import textwrap\n from functools import partial\n \n import sqlglot as sg\n@@ -36,6 +40,8 @@ class SnowflakeFuncGen(FuncGen):\n class SnowflakeCompiler(SQLGlotCompiler):\n __slots__ = ()\n \n+ latest_udf_python_version = (3, 11)\n+\n dialect = Snowflake\n type_mapper = SnowflakeType\n 
no_limit_value = NULL\n@@ -95,6 +101,94 @@ class SnowflakeCompiler(SQLGlotCompiler):\n super().__init__()\n self.f = SnowflakeFuncGen()\n \n+ _UDF_TEMPLATES = {\n+ ops.udf.InputType.PYTHON: \"\"\"\\\n+{preamble}\n+HANDLER = '{func_name}'\n+AS $$\n+from __future__ import annotations\n+\n+from typing import *\n+\n+{source}\n+$$\"\"\",\n+ ops.udf.InputType.PANDAS: \"\"\"\\\n+{preamble}\n+HANDLER = 'wrapper'\n+AS $$\n+from __future__ import annotations\n+\n+from typing import *\n+\n+import _snowflake\n+import pandas as pd\n+\n+{source}\n+\n+@_snowflake.vectorized(input=pd.DataFrame)\n+def wrapper(df):\n+ return {func_name}(*(col for _, col in df.items()))\n+$$\"\"\",\n+ }\n+\n+ _UDF_PREAMBLE_LINES = (\n+ \"CREATE OR REPLACE TEMPORARY FUNCTION {name}({signature})\",\n+ \"RETURNS {return_type}\",\n+ \"LANGUAGE PYTHON\",\n+ \"IMMUTABLE\",\n+ \"RUNTIME_VERSION = '{version}'\",\n+ \"COMMENT = '{comment}'\",\n+ )\n+\n+ def _compile_udf(self, udf_node: ops.ScalarUDF):\n+ import ibis\n+\n+ name = type(udf_node).__name__\n+ signature = \", \".join(\n+ f\"{name} {self.type_mapper.to_string(arg.dtype)}\"\n+ for name, arg in zip(udf_node.argnames, udf_node.args)\n+ )\n+ return_type = SnowflakeType.to_string(udf_node.dtype)\n+ lines, _ = inspect.getsourcelines(udf_node.__func__)\n+ source = textwrap.dedent(\n+ \"\".join(\n+ itertools.dropwhile(\n+ lambda line: not line.lstrip().startswith(\"def \"), lines\n+ )\n+ )\n+ ).strip()\n+\n+ config = udf_node.__config__\n+\n+ preamble_lines = [*self._UDF_PREAMBLE_LINES]\n+\n+ if imports := config.get(\"imports\"):\n+ preamble_lines.append(f\"IMPORTS = ({', '.join(map(repr, imports))})\")\n+\n+ packages = \"({})\".format(\n+ \", \".join(map(repr, (\"pandas\", *config.get(\"packages\", ()))))\n+ )\n+ preamble_lines.append(f\"PACKAGES = {packages}\")\n+\n+ template = self._UDF_TEMPLATES[udf_node.__input_type__]\n+ return template.format(\n+ source=source,\n+ name=name,\n+ func_name=udf_node.__func_name__,\n+ preamble=\"\\n\".join(preamble_lines).format(\n+ name=name,\n+ signature=signature,\n+ return_type=return_type,\n+ comment=f\"Generated by ibis {ibis.__version__} using Python {platform.python_version()}\",\n+ version=\".\".join(\n+ map(str, min(sys.version_info[:2], self.latest_udf_python_version))\n+ ),\n+ ),\n+ )\n+\n+ _compile_pandas_udf = _compile_udf\n+ _compile_python_udf = _compile_udf\n+\n @staticmethod\n def _minimize_spec(start, end, spec):\n if (\n@@ -774,3 +868,6 @@ class SnowflakeCompiler(SQLGlotCompiler):\n \n def visit_ArrayMean(self, op, *, arg):\n return self.cast(self.f.udf.array_avg(arg), op.dtype)\n+\n+\n+compiler = SnowflakeCompiler()\n", "sqlite.py": "@@ -480,3 +480,6 @@ class SQLiteCompiler(SQLGlotCompiler):\n ):\n raise com.UnsupportedBackendType(f\"Unsupported type: {dtype!r}\")\n return super().visit_NonNullLiteral(op, value=value, dtype=dtype)\n+\n+\n+compiler = SQLiteCompiler()\n", "trino.py": "@@ -652,3 +652,6 @@ class TrinoCompiler(SQLGlotCompiler):\n \n def visit_ArrayMean(self, op, *, arg):\n return self.visit_ArraySumAgg(op, arg=arg, output=operator.truediv)\n+\n+\n+compiler = TrinoCompiler()\n", "test_generic.py": "@@ -1384,22 +1384,6 @@ def test_memtable_column_naming_mismatch(con, monkeypatch, df, columns):\n ibis.memtable(df, columns=columns)\n \n \[email protected](\n- [\"dask\", \"pandas\", \"polars\"], raises=NotImplementedError, reason=\"not a SQL backend\"\n-)\n-def test_many_subqueries(con, snapshot):\n- def query(t, group_cols):\n- t2 = t.mutate(key=ibis.row_number().over(ibis.window(order_by=group_cols)))\n- return 
t2.inner_join(t2[[\"key\"]], \"key\")\n-\n- t = ibis.table(dict(street=\"str\"), name=\"data\")\n-\n- t2 = query(t, group_cols=[\"street\"])\n- t3 = query(t2, group_cols=[\"street\"])\n-\n- snapshot.assert_match(str(ibis.to_sql(t3, dialect=con.name)), \"out.sql\")\n-\n-\n @pytest.mark.notimpl([\"oracle\", \"exasol\"], raises=com.OperationNotDefinedError)\n @pytest.mark.notimpl([\"druid\"], raises=AssertionError)\n @pytest.mark.notyet(\n@@ -2289,18 +2273,11 @@ def test_sample_with_seed(backend):\n backend.assert_frame_equal(df1, df2)\n \n \[email protected](\n- [\"dask\", \"pandas\", \"polars\"], raises=NotImplementedError, reason=\"not a SQL backend\"\n-)\n def test_simple_memtable_construct(con):\n t = ibis.memtable({\"a\": [1, 2]})\n expr = t.a\n expected = [1.0, 2.0]\n assert sorted(con.to_pandas(expr).tolist()) == expected\n- # we can't generically check for specific sql, even with a snapshot,\n- # because memtables have a unique name per table per process, so smoke test\n- # it\n- assert str(ibis.to_sql(expr, dialect=con.name)).startswith(\"SELECT\")\n \n \n def test_select_mutate_with_dict(backend):\n@@ -2490,3 +2467,14 @@ def test_value_counts_on_tables(backend, df):\n )\n expected = expected.sort_values(expected.columns.tolist()).reset_index(drop=True)\n backend.assert_frame_equal(result, expected, check_dtype=False)\n+\n+\n+def test_union_generates_predictable_aliases(con):\n+ t = ibis.memtable(\n+ data=[{\"island\": \"Torgerson\", \"body_mass_g\": 3750, \"sex\": \"male\"}]\n+ )\n+ sub1 = t.inner_join(t.view(), \"island\").mutate(island_right=lambda t: t.island)\n+ sub2 = t.inner_join(t.view(), \"sex\").mutate(sex_right=lambda t: t.sex)\n+ expr = ibis.union(sub1, sub2)\n+ df = con.execute(expr)\n+ assert len(df) == 2\n", "test_sql.py": "@@ -42,17 +42,12 @@ sg = pytest.importorskip(\"sqlglot\")\n ),\n ],\n )\[email protected](\n- [\"pandas\", \"dask\"],\n- raises=(exc.IbisError, NotImplementedError, ValueError),\n- reason=\"Not a SQL backend\",\n-)\[email protected]([\"polars\"], reason=\"Not clear how to extract SQL from the backend\")\[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\", raises=ValueError)\n def test_literal(backend, expr):\n assert \"432\" in ibis.to_sql(expr, dialect=backend.name())\n \n \[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\")\[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\", raises=ValueError)\n def test_group_by_has_index(backend, snapshot):\n countries = ibis.table(\n dict(continent=\"string\", population=\"int64\"), name=\"countries\"\n@@ -75,7 +70,7 @@ def test_group_by_has_index(backend, snapshot):\n snapshot.assert_match(sql, \"out.sql\")\n \n \[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\")\[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\", raises=ValueError)\n def test_cte_refs_in_topo_order(backend, snapshot):\n mr0 = ibis.table(schema=ibis.schema(dict(key=\"int\")), name=\"leaf\")\n \n@@ -88,7 +83,7 @@ def test_cte_refs_in_topo_order(backend, snapshot):\n snapshot.assert_match(sql, \"out.sql\")\n \n \[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\")\[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\", raises=ValueError)\n def test_isin_bug(con, snapshot):\n t = ibis.table(dict(x=\"int\"), name=\"t\")\n good = t[t.x > 2].x\n@@ -96,11 +91,7 @@ def test_isin_bug(con, snapshot):\n snapshot.assert_match(str(ibis.to_sql(expr, dialect=con.name)), \"out.sql\")\n \n \[email protected](\n- 
[\"pandas\", \"dask\", \"polars\"],\n- reason=\"not SQL\",\n- raises=NotImplementedError,\n-)\[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\", raises=ValueError)\n @pytest.mark.notyet(\n [\"exasol\", \"oracle\", \"flink\"],\n reason=\"no unnest support\",\n@@ -165,22 +156,7 @@ def test_union_aliasing(backend_name, snapshot):\n snapshot.assert_match(str(ibis.to_sql(result, dialect=backend_name)), \"out.sql\")\n \n \n-def test_union_generates_predictable_aliases(con):\n- t = ibis.memtable(\n- data=[{\"island\": \"Torgerson\", \"body_mass_g\": 3750, \"sex\": \"male\"}]\n- )\n- sub1 = t.inner_join(t.view(), \"island\").mutate(island_right=lambda t: t.island)\n- sub2 = t.inner_join(t.view(), \"sex\").mutate(sex_right=lambda t: t.sex)\n- expr = ibis.union(sub1, sub2)\n- df = con.execute(expr)\n- assert len(df) == 2\n-\n-\[email protected](\n- [\"pandas\", \"dask\", \"polars\"],\n- reason=\"not SQL\",\n- raises=NotImplementedError,\n-)\[email protected]([\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\", raises=ValueError)\n @pytest.mark.parametrize(\n \"value\",\n [\n@@ -204,11 +180,28 @@ def test_selects_with_impure_operations_not_merged(con, snapshot, value):\n snapshot.assert_match(sql, \"out.sql\")\n \n \[email protected]([\"polars\"], reason=\"no sql generation\")\[email protected]([\"pandas\", \"dask\"], reason=\"no sql generation\")\[email protected](\n+ [\"pandas\", \"dask\", \"polars\"], reason=\"not SQL\", raises=NotImplementedError\n+)\n def test_to_sql_default_backend(con, snapshot, monkeypatch):\n monkeypatch.setattr(ibis.options, \"default_backend\", con)\n \n t = ibis.memtable({\"b\": [1, 2]}, name=\"mytable\")\n expr = t.select(\"b\").count()\n snapshot.assert_match(ibis.to_sql(expr), \"to_sql.sql\")\n+\n+\[email protected](\n+ [\"dask\", \"pandas\", \"polars\"], raises=ValueError, reason=\"not a SQL backend\"\n+)\n+def test_many_subqueries(backend_name, snapshot):\n+ def query(t, group_cols):\n+ t2 = t.mutate(key=ibis.row_number().over(ibis.window(order_by=group_cols)))\n+ return t2.inner_join(t2[[\"key\"]], \"key\")\n+\n+ t = ibis.table(dict(street=\"str\"), name=\"data\")\n+\n+ t2 = query(t, group_cols=[\"street\"])\n+ t3 = query(t2, group_cols=[\"street\"])\n+\n+ snapshot.assert_match(str(ibis.to_sql(t3, dialect=backend_name)), \"out.sql\")\n", "schema.py": "@@ -63,6 +63,10 @@ class Schema(Concrete, Coercible, MapSet):\n def types(self):\n return tuple(self.values())\n \n+ @attribute\n+ def geospatial(self) -> tuple[str, ...]:\n+ return tuple(name for name, typ in self.fields.items() if typ.is_geospatial())\n+\n @attribute\n def _name_locs(self) -> dict[str, int]:\n return {v: i for i, v in enumerate(self.names)}\n", "sql.py": "@@ -362,26 +362,28 @@ def to_sql(\n Formatted SQL string\n \n \"\"\"\n+ import ibis.backends.sql.compilers as sc\n+\n # try to infer from a non-str expression or if not possible fallback to\n # the default pretty dialect for expressions\n if dialect is None:\n try:\n- backend = expr._find_backend(use_default=True)\n+ compiler_provider = expr._find_backend(use_default=True)\n except com.IbisError:\n # default to duckdb for SQL compilation because it supports the\n # widest array of ibis features for SQL backends\n- backend = ibis.duckdb\n- dialect = ibis.options.sql.default_dialect\n- else:\n- dialect = backend.dialect\n+ compiler_provider = sc.duckdb\n else:\n try:\n- backend = getattr(ibis, dialect)\n- except AttributeError:\n- raise ValueError(f\"Unknown dialect {dialect}\")\n- else:\n- dialect = getattr(backend, 
\"dialect\", dialect)\n+ compiler_provider = getattr(sc, dialect)\n+ except AttributeError as e:\n+ raise ValueError(f\"Unknown dialect {dialect}\") from e\n+\n+ if (compiler := getattr(compiler_provider, \"compiler\", None)) is None:\n+ raise NotImplementedError(f\"{compiler_provider} is not a SQL backend\")\n \n- sg_expr = backend._to_sqlglot(expr.unbind(), **kwargs)\n- sql = sg_expr.sql(dialect=dialect, pretty=pretty)\n+ out = compiler.to_sqlglot(expr.unbind(), **kwargs)\n+ queries = out if isinstance(out, list) else [out]\n+ dialect = compiler.dialect\n+ sql = \";\\n\".join(query.sql(dialect=dialect, pretty=pretty) for query in queries)\n return SQLString(sql)\n", "mocks.py": "@@ -53,11 +53,6 @@ class MockBackend(BaseBackend):\n def list_databases(self):\n return [\"mockdb\"]\n \n- def _to_sqlglot(self, expr, **kwargs):\n- import ibis\n-\n- return ibis.duckdb._to_sqlglot(expr, **kwargs)\n-\n def fetch_from_cursor(self, cursor, schema):\n pass\n \n", "test_sql_builtins.py": "@@ -16,6 +16,7 @@ from __future__ import annotations\n import pytest\n \n import ibis\n+import ibis.backends.sql.compilers as sc\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n from ibis import _\n@@ -223,3 +224,11 @@ def test_no_arguments_errors(function):\n SignatureValidationError, match=\".+ has failed due to the following errors:\"\n ):\n function()\n+\n+\[email protected](\n+ \"name\", [name.lower().removesuffix(\"compiler\") for name in sc.__all__]\n+)\n+def test_compile_without_dependencies(name):\n+ table = ibis.table({\"a\": \"int64\"}, name=\"t\")\n+ assert isinstance(ibis.to_sql(table, dialect=name), str)\n"}
fix(duckdb): use functions for temporal literals
b1407f8d1ca524f8ff8f4622200acd040874de1f
fix
https://github.com/ibis-project/ibis/commit/b1407f8d1ca524f8ff8f4622200acd040874de1f
use functions for temporal literals
{"registry.py": "@@ -208,8 +208,14 @@ def _literal(t, op):\n return sa.func.map(\n sa.func.list_value(*value.keys()), sa.func.list_value(*value.values())\n )\n+ elif dtype.is_timestamp():\n+ return sa.cast(value.isoformat(), t.get_sqla_type(dtype))\n elif dtype.is_date():\n- return sa.cast(sa.literal(str(value)), sqla_type)\n+ return sa.func.make_date(value.year, value.month, value.day)\n+ elif dtype.is_time():\n+ return sa.func.make_time(\n+ value.hour, value.minute, value.second + value.microsecond / 1e6\n+ )\n else:\n return sa.cast(sa.literal(value), sqla_type)\n \n", "out.sql": "@@ -0,0 +1,2 @@\n+SELECT\n+ CAST('04:05:06.230136' AS TIME) AS \"datetime.time(4, 5, 6, 230136)\"\n\\ No newline at end of file\n"}
test: ignore coverage on some fallbacks
0cad3ed1790b39979b5f3732f770de3e1083449b
test
https://github.com/mikro-orm/mikro-orm/commit/0cad3ed1790b39979b5f3732f770de3e1083449b
ignore coverage on some fallbacks
{"DatabaseDriver.ts": "@@ -189,12 +189,14 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD\n def = Cursor.for<T>(meta, def, orderBy);\n }\n \n+ /* istanbul ignore next */\n const offsets = def ? Cursor.decode(def as string) as Dictionary[] : [];\n \n if (definition.length === offsets.length) {\n return this.createCursorCondition<T>(definition, offsets, inverse);\n }\n \n+ /* istanbul ignore next */\n return {} as FilterQuery<T>;\n };\n \n@@ -230,7 +232,8 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD\n };\n }\n \n- protected createCursorCondition<T extends object>(definition: (readonly [keyof T & string, QueryOrder])[], offsets: Dictionary[], inverse = false): FilterQuery<T> {\n+ /* istanbul ignore next */\n+ protected createCursorCondition<T extends object>(definition: (readonly [keyof T & string, QueryOrder])[], offsets: Dictionary[], inverse: boolean): FilterQuery<T> {\n const createCondition = (prop: string, direction: QueryOrderKeys<T>, offset: Dictionary, eq = false) => {\n if (Utils.isPlainObject(direction)) {\n const value = Utils.keys(direction).reduce((o, key) => {\n@@ -286,7 +289,7 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD\n return;\n }\n \n- if (prop.embeddedProps && (prop.object || object)) {\n+ if (prop.embeddedProps && (object || prop.object)) {\n const copy = data[k];\n delete data[k];\n \n@@ -321,6 +324,7 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD\n data[k] = raw(sql.replace(/\\?/g, '\\\\?'));\n }\n \n+ /* istanbul ignore next */\n if (!prop.customType && (Array.isArray(data[k]) || Utils.isPlainObject(data[k]))) {\n data[k] = JSON.stringify(data[k]);\n }\n@@ -334,6 +338,7 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD\n }\n \n protected inlineEmbeddables<T extends object>(meta: EntityMetadata<T>, data: T, where?: boolean): void {\n+ /* istanbul ignore next */\n if (data == null) {\n return;\n }\n@@ -380,6 +385,7 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD\n \n // we might be using some native JSON operator, e.g. with mongodb's `$geoWithin` or `$exists`\n if (props[kk]) {\n+ /* istanbul ignore next */\n inline(data[prop.name], props[kk] || props[parentPropName], [prop.name]);\n } else if (props[parentPropName]) {\n data[`${prop.name}.${kk}` as keyof T] = (data[prop.name] as Dictionary)[kk];\n"}
chore: remove eslint plugin for jsdoc
d7ba4420f52fcd65c616636f307dfd9a491162e5
chore
https://github.com/mikro-orm/mikro-orm/commit/d7ba4420f52fcd65c616636f307dfd9a491162e5
remove eslint plugin for jsdoc
{".eslintrc.js": "@@ -18,7 +18,6 @@ module.exports = {\n },\n plugins: [\n '@typescript-eslint',\n- 'jsdoc',\n ],\n rules: {\n '@typescript-eslint/prefer-optional-chain': 'error',\n@@ -123,7 +122,6 @@ module.exports = {\n 'dot-notation': 'error',\n 'eol-last': 'error',\n 'eqeqeq': ['error', 'always', {\"null\": \"ignore\"}],\n- 'jsdoc/no-types': 'error',\n 'no-console': 'error',\n 'no-duplicate-imports': 'error',\n 'no-multiple-empty-lines': 'error',\n", "package.json": "@@ -131,7 +131,6 @@\n \"conditional-type-checks\": \"1.0.5\",\n \"coveralls\": \"3.1.1\",\n \"eslint\": \"7.32.0\",\n- \"eslint-plugin-jsdoc\": \"36.0.6\",\n \"fs-extra\": \"10.0.0\",\n \"gen-esm-wrapper\": \"1.1.2\",\n \"guid-typescript\": \"1.0.9\",\n", "yarn.lock": "Binary files a/yarn.lock and b/yarn.lock differ\n"}
style: sort `ruff` config
6305b06e5d1da5f843f02fcad338e79acbd90b5a
style
https://github.com/ibis-project/ibis/commit/6305b06e5d1da5f843f02fcad338e79acbd90b5a
sort `ruff` config
{"pyproject.toml": "@@ -299,41 +299,33 @@ markers = [\n line-length = 88\n select = [\n \"B\", # flake8-bugbear\n+ \"BLE\", # flake8-blind-except\n+ \"C4\", # comprehensions\n \"D\", # pydocstyle\n \"E\", # pycodestyle\n- \"W\", # pycodestyle\n \"F\", # pyflakes\n \"I\", # isort\n- \"UP\", # pyupgrade\n- \"RUF\", # ruff-specific rules\n- \"TID\", # flake8-tidy-imports\n- \"T10\", # flake8-debugger\n+ \"ICN\", # flake8-import-conventions\n+ \"ISC\", # flake8-implicit-str-concat\n \"PGH\", # pygrep-hooks\n \"PLC\", # pylint\n \"PLE\", # pylint\n \"PLW\", # pylint\n+ \"RET\", # flake8-return\n+ \"RUF\", # ruff-specific rules\n \"SIM\", # flake8-simplify\n+ \"T10\", # flake8-debugger\n \"T20\", # flake8-print\n- \"ICN\", # flake8-import-conventions\n- \"RET\", # flake8-return\n- \"BLE\", # flake8-blind-except\n- \"ISC\", # flake8-implicit-str-concat\n+ \"TID\", # flake8-tidy-imports\n+ \"UP\", # pyupgrade\n+ \"W\", # pycodestyle\n \"YTT\", # flake8-2020\n- \"C4\", # comprehensions\n \n ]\n respect-gitignore = true\n ignore = [\n \"B904\", # raise from e or raise from None in exception handlers\n \"C408\", # dict(...) as literal\n- \"E501\",\n- \"E731\",\n- \"PGH003\",\n- \"RET504\",\n- \"RET505\",\n- \"RET506\",\n- \"RET507\",\n- \"RET508\",\n \"D100\", # public module\n \"D101\", # public class\n \"D102\", # public method\n@@ -344,7 +336,14 @@ ignore = [\n \"D203\", # blank line before class docstring\n \"D213\", # Multi-line docstring summary should start at the second line\n \"D402\", # First line should not be the function's signature\n-\n+ \"E501\",\n+ \"E731\",\n+ \"PGH003\",\n+ \"RET504\",\n+ \"RET505\",\n+ \"RET506\",\n+ \"RET507\",\n+ \"RET508\",\n ]\n exclude = [\"*_py310.py\", \"ibis/tests/*/snapshots/*\"]\n target-version = \"py38\"\n"}
fix: recursive type bug
5ef128811318d9c1e4c6281dfba60c7a0d7c8574
fix
https://github.com/erg-lang/erg/commit/5ef128811318d9c1e4c6281dfba60c7a0d7c8574
recursive type bug
{"macros.rs": "@@ -495,6 +495,14 @@ macro_rules! log {\n }\n }};\n \n+ (backtrace) => {{\n+ if cfg!(feature = \"debug\") {\n+ use $crate::style::*;\n+ $crate::debug_info!();\n+ println!(\"\\n{}\", std::backtrace::Backtrace::capture());\n+ }\n+ }};\n+\n ($($arg: tt)*) => {{\n if cfg!(feature = \"debug\") {\n use $crate::style::*;\n", "register.rs": "@@ -785,7 +785,7 @@ impl Context {\n readable_name(name.inspect()),\n &expect,\n &found,\n- e.core.get_hint().map(|s| s.to_string()),\n+ // e.core.get_hint().map(|s| s.to_string()),\n )\n })\n .collect(),\n", "unify.rs": "@@ -145,10 +145,10 @@ impl<'c, 'l, L: Locational> Unifier<'c, 'l, L> {\n self.occur_inner(lhs, l)?;\n self.occur_inner(lhs, r)\n }\n- /*(Or(l, r), rhs) | (And(l, r), rhs) => {\n- self.occur_inner(l, rhs, loc)?;\n- self.occur_inner(r, rhs, loc)\n- }*/\n+ (Or(l, r), rhs) | (And(l, r), rhs) => {\n+ self.occur_inner(l, rhs)?;\n+ self.occur_inner(r, rhs)\n+ }\n _ => Ok(()),\n }\n }\n@@ -158,7 +158,7 @@ impl<'c, 'l, L: Locational> Unifier<'c, 'l, L> {\n (FreeVar(fv), _) if fv.is_linked() => self.occur_inner(&fv.crack(), maybe_sup),\n (_, FreeVar(fv)) if fv.is_linked() => self.occur_inner(maybe_sub, &fv.crack()),\n (FreeVar(sub), FreeVar(sup)) => {\n- if sub.is_unbound() && sup.is_unbound() && sub == sup {\n+ if sub.is_unbound() && sup.is_unbound() && sub.addr_eq(sup) {\n Err(TyCheckErrors::from(TyCheckError::subtyping_error(\n self.ctx.cfg.input.clone(),\n line!() as usize,\n@@ -168,6 +168,20 @@ impl<'c, 'l, L: Locational> Unifier<'c, 'l, L> {\n self.ctx.caused_by(),\n )))\n } else {\n+ if sub.constraint_is_sandwiched() {\n+ let (sub_t, sup_t) = sub.get_subsup().unwrap();\n+ sub.do_avoiding_recursion(|| {\n+ self.occur_inner(&sub_t, maybe_sup)?;\n+ self.occur_inner(&sup_t, maybe_sup)\n+ })?;\n+ }\n+ if sup.constraint_is_sandwiched() {\n+ let (sub_t, sup_t) = sup.get_subsup().unwrap();\n+ sup.do_avoiding_recursion(|| {\n+ self.occur_inner(maybe_sub, &sub_t)?;\n+ self.occur_inner(maybe_sub, &sup_t)\n+ })?;\n+ }\n Ok(())\n }\n }\n", "mod.rs": "@@ -590,7 +590,6 @@ mod test {\n name,\n &expect,\n &found,\n- None,\n );\n errors.push(err);\n \n", "tycheck.rs": "@@ -178,7 +178,7 @@ impl TyCheckError {\n name: &str,\n expect: &Type,\n found: &Type,\n- hint: Option<String>,\n+ // hint: Option<String>,\n ) -> Self {\n let name = name.with_color(Color::Yellow);\n let mut expct = StyledStrings::default();\n@@ -198,13 +198,19 @@ impl TyCheckError {\n \"english\" =>fnd.push_str(\"but found: \"),\n );\n fnd.push_str_with_color_and_attr(format!(\"{found}\"), ERR, ATTR);\n+ let hint = switch_lang!(\n+ \"japanese\" => \"\u81ea\u8eab\u3092\u8fd4\u3059\u95a2\u6570\u306f\u5b9a\u7fa9\u3067\u304d\u307e\u305b\u3093\",\n+ \"simplified_chinese\" => \"\u4e0d\u80fd\u5b9a\u4e49\u8fd4\u56de\u81ea\u8eab\u7684\u51fd\u6570\",\n+ \"traditional_chinese\" => \"\u4e0d\u80fd\u5b9a\u7fa9\u8fd4\u56de\u81ea\u8eab\u7684\u51fd\u6578\",\n+ \"english\" => \"cannot define a function that returns itself\",\n+ );\n \n Self::new(\n ErrorCore::new(\n vec![SubMessage::ambiguous_new(\n loc,\n vec![expct.to_string(), fnd.to_string()],\n- hint,\n+ Some(hint.into()),\n )],\n switch_lang!(\n \"japanese\" => format!(\"{name}\u306e\u623b\u308a\u5024\u306e\u578b\u304c\u9055\u3044\u307e\u3059\"),\n", "recursive_fn.er": "@@ -0,0 +1,7 @@\n+f() = f # ERR\n+\n+g 0 = 0\n+g _: Int = g # ERR\n+\n+# left() = right() # ERR\n+# right() = left\n", "test.rs": "@@ -473,6 +473,11 @@ fn exec_quantified_err() -> Result<(), ()> {\n expect_failure(\"tests/should_err/quantified.er\", 0, 3)\n }\n \n+#[test]\n+fn 
exec_recursive_fn_err() -> Result<(), ()> {\n+ expect_failure(\"tests/should_err/recursive_fn.er\", 0, 2)\n+}\n+\n #[test]\n fn exec_refinement_err() -> Result<(), ()> {\n expect_failure(\"tests/should_err/refinement.er\", 0, 8)\n"}
build: try to fix npm dev builds publishing
b64024ce995b70c867fe987a8e28202f12225b14
build
https://github.com/mikro-orm/mikro-orm/commit/b64024ce995b70c867fe987a8e28202f12225b14
try to fix npm dev builds publishing
{"nightly.yml": "@@ -27,8 +27,9 @@ jobs:\n \n - name: Create .npmrc\n run: |\n- echo -e \"access=public\" >> ~/.npmrc\n- echo -e \"//registry.npmjs.org/:_authToken=$NODE_AUTH_TOKEN\" >> ~/.npmrc\n+ pwd\n+ echo -e \"access=public\" >> .npmrc\n+ echo -e \"//registry.npmjs.org/:_authToken=$NODE_AUTH_TOKEN\" >> .npmrc\n \n - name: Install libkrb5-dev (for mongo)\n run: sudo apt install libkrb5-dev\n"}
fix(mongo): fix logged client url
f2179fac07edaf8d0d71c58bde79c9461f075a4a
fix
https://github.com/mikro-orm/mikro-orm/commit/f2179fac07edaf8d0d71c58bde79c9461f075a4a
fix logged client url
{"Connection.ts": "@@ -24,7 +24,7 @@ export abstract class Connection {\n abstract async close(force?: boolean): Promise<void>;\n \n /**\n- * Returns default client url for given driver (e.g. mongodb://localhost:27017 for mongodb)\n+ * Returns default client url for given driver (e.g. mongodb://127.0.0.1:27017 for mongodb)\n */\n abstract getDefaultClientUrl(): string;\n \n", "MongoConnection.ts": "@@ -27,7 +27,7 @@ export class MongoConnection extends Connection {\n }\n \n getDefaultClientUrl(): string {\n- return 'mongodb://localhost:27017';\n+ return 'mongodb://127.0.0.1:27017';\n }\n \n getConnectionOptions(): MongoClientOptions & ConnectionConfig {\n@@ -42,6 +42,14 @@ export class MongoConnection extends Connection {\n return ret;\n }\n \n+ getClientUrl(): string {\n+ const options = this.getConnectionOptions();\n+ const clientUrl = this.config.getClientUrl(true);\n+ const match = clientUrl.match(/^(\\w+):\\/\\/((.*@.+)|.+)$/);\n+\n+ return match ? `${match[1]}://${options.auth ? options.auth.user + ':*****@' : ''}${match[2]}` : clientUrl;\n+ }\n+\n async execute(query: string): Promise<any> {\n throw new Error(`${this.constructor.name} does not support generic execute method`);\n }\n", "EntityManager.mongo.test.ts": "@@ -317,6 +317,23 @@ describe('EntityManagerMongo', () => {\n });\n });\n \n+ test('connection returns correct URL', async () => {\n+ const conn1 = new MongoConnection(new Configuration({\n+ clientUrl: 'mongodb://example.host.com:34500',\n+ dbName: 'test-db-name',\n+ user: 'usr',\n+ password: 'pw',\n+ } as any, false));\n+ await expect(conn1.getClientUrl()).toBe('mongodb://usr:*****@example.host.com:34500');\n+ const conn2 = new MongoConnection(new Configuration({ type: 'mongo' } as any, false));\n+ await expect(conn2.getClientUrl()).toBe('mongodb://127.0.0.1:27017');\n+ const clientUrl = 'mongodb://user:Q#ais@2d-Aa_43:[email protected]:27017,mongodb-replicaset-1.cluster.local:27018,...';\n+ const conn3 = new MongoConnection(new Configuration({ type: 'mongo', clientUrl } as any, false));\n+ await expect(conn3.getClientUrl()).toBe('mongodb://user:*****@mongodb-replicaset-0.cluster.local:27017,mongodb-replicaset-1.cluster.local:27018,...');\n+ const conn4 = new MongoConnection(new Configuration({ type: 'mongo', clientUrl: 'invalid-url-that-was-not-properly-parsed' } as any, false));\n+ await expect(conn4.getClientUrl()).toBe('invalid-url-that-was-not-properly-parsed');\n+ });\n+\n test('findOne by id', async () => {\n const authorRepository = orm.em.getRepository(Author);\n const jon = new Author('Jon Snow', '[email protected]');\n", "EntityManager.mysql.test.ts": "@@ -69,7 +69,7 @@ describe('EntityManagerMySql', () => {\n clientUrl: 'mysql://example.host.com',\n port: 1234,\n user: 'usr',\n- password: 'pw'\n+ password: 'pw',\n } as any, false));\n await expect(conn1.getClientUrl()).toBe('mysql://usr:*****@example.host.com:1234');\n const conn2 = new MySqlConnection(new Configuration({ type: 'mysql', port: 3307 } as any, false));\n", "EntityManager.postgre.test.ts": "@@ -69,7 +69,7 @@ describe('EntityManagerPostgre', () => {\n clientUrl: 'postgre://example.host.com',\n port: 1234,\n user: 'usr',\n- password: 'pw'\n+ password: 'pw',\n } as any, false));\n await expect(conn1.getClientUrl()).toBe('postgre://usr:*****@example.host.com:1234');\n const conn2 = new PostgreSqlConnection(new Configuration({ type: 'postgresql', port: 5433 } as any, false));\n", "MikroORM.test.ts": "@@ -27,14 +27,6 @@ describe('MikroORM', () => {\n await expect(MikroORM.init({ dbName: 'test', 
baseDir: BASE_DIR, entities: [FooBaz2], cache: { enabled: false }, entitiesDirsTs: ['entities'] })).rejects.toThrowError(error);\n });\n \n- test('should hide password from connection uri', async () => {\n- const clientUrl = 'mongodb://dev-vision:Q#ais@2d-Aa_43:ui!0d.ai6d@mongodb-replicaset-0.mongodb-replicaset.dev.svc.cluster.local:27017,mongodb-replicaset-1...';\n- const expected = 'mongodb://dev-vision:*****@mongodb-replicaset-0.mongodb-replicaset.dev.svc.cluster.local:27017,mongodb-replicaset-1...';\n- const conf = new Configuration({ clientUrl } as any, false);\n- const hidden = conf.getClientUrl(true);\n- expect(hidden).toBe(expected);\n- });\n-\n test('should init itself with entity manager', async () => {\n const orm = await MikroORM.init({\n entitiesDirs: ['entities'],\n"}
fix(sql): avoid calling .subquery on subqueries
7ad32bddceab812d162197968e30860caefe9ad9
fix
https://github.com/rohankumardubey/ibis/commit/7ad32bddceab812d162197968e30860caefe9ad9
avoid calling .subquery on subqueries
{"compiler.py": "@@ -461,10 +461,13 @@ class SQLGlotCompiler(abc.ABC):\n aliases[node] = alias\n \n alias = sg.to_identifier(alias, quoted=self.quoted)\n- try:\n- return result.subquery(alias)\n- except AttributeError:\n+ if isinstance(result, sge.Subquery):\n return result.as_(alias, quoted=self.quoted)\n+ else:\n+ try:\n+ return result.subquery(alias)\n+ except AttributeError:\n+ return result.as_(alias, quoted=self.quoted)\n \n # apply translate rules in topological order\n results = op.map(fn)\n@@ -1344,10 +1347,13 @@ class SQLGlotCompiler(abc.ABC):\n if isinstance(child, sge.Table):\n child = sg.select(STAR).from_(child)\n \n- try:\n- return child.subquery(name)\n- except AttributeError:\n- return child.as_(name)\n+ if isinstance(child, sge.Subquery):\n+ return child.as_(name, quoted=self.quoted)\n+ else:\n+ try:\n+ return child.subquery(name)\n+ except AttributeError:\n+ return child.as_(name, quoted=self.quoted)\n \n def visit_SQLStringView(self, op, *, query: str, child, schema):\n return sg.parse_one(query, read=self.dialect)\n"}
test: remove use of specific now comparisons in tests
98a0fa15d4cb0c156b2b3d175de1c72740b606a5
test
https://github.com/rohankumardubey/ibis/commit/98a0fa15d4cb0c156b2b3d175de1c72740b606a5
remove use of specific now comparisons in tests
{"test_compiler.py": "@@ -1,5 +1,6 @@\n import datetime\n import re\n+import time\n from operator import floordiv, methodcaller, truediv\n \n import pandas as pd\n@@ -260,10 +261,10 @@ def test_large_compile():\n table = table.mutate(dummy=ibis.literal(\"\"))\n table = table.left_join(table, [\"dummy\"])[[table]]\n \n- start = datetime.datetime.now()\n+ start = time.time()\n table.compile()\n- delta = datetime.datetime.now() - start\n- assert delta.total_seconds() < 10\n+ delta = time.time() - start\n+ assert delta < 10\n \n \n @pytest.mark.parametrize(\n", "test_functions.py": "@@ -1,4 +1,3 @@\n-import datetime\n import math\n import sqlite3\n import uuid\n@@ -71,13 +70,6 @@ def test_timestamp_functions(con):\n assert con.execute(expr) == expected\n \n \n-def test_now(con):\n- expr = ibis.now().strftime('%Y%m%d %H')\n- result = con.execute(expr)\n- expected = datetime.datetime.utcnow().strftime('%Y%m%d %H')\n- assert result == expected\n-\n-\n @pytest.mark.parametrize(\n ('expr', 'expected'),\n [\n", "test_temporal.py": "@@ -1741,17 +1741,12 @@ def test_day_of_week_column_group_by(\n \n \n @pytest.mark.notimpl(\n- [\"datafusion\", \"mssql\", \"druid\", \"oracle\"], raises=com.OperationNotDefinedError\n+ [\"datafusion\", \"druid\", \"oracle\"], raises=com.OperationNotDefinedError\n )\n def test_now(con):\n expr = ibis.now()\n result = con.execute(expr.name(\"tmp\"))\n- assert isinstance(result, pd.Timestamp)\n-\n- pattern = \"%Y%m%d %H\"\n- result_strftime = con.execute(expr.strftime(pattern).name(\"now\"))\n- expected_strftime = datetime.datetime.utcnow().strftime(pattern)\n- assert result_strftime == expected_strftime\n+ assert isinstance(result, datetime.datetime)\n \n \n @pytest.mark.notimpl([\"polars\"], reason=\"assert 1 == 5\", raises=AssertionError)\n@@ -1759,19 +1754,13 @@ def test_now(con):\n [\"datafusion\", \"druid\", \"oracle\"], raises=com.OperationNotDefinedError\n )\n def test_now_from_projection(alltypes):\n- n = 5\n- expr = alltypes[[ibis.now().name('ts')]].limit(n)\n+ n = 2\n+ expr = alltypes.select(now=ibis.now()).limit(n)\n result = expr.execute()\n- ts = result.ts\n- assert isinstance(result, pd.DataFrame)\n- assert isinstance(ts, pd.Series)\n+ ts = result.now\n assert len(result) == n\n assert ts.nunique() == 1\n-\n- now = pd.Timestamp('now')\n- year = ts.dt.year\n- year_expected = pd.Series([now.year] * n, name='ts')\n- tm.assert_series_equal(year, year_expected, check_dtype=False)\n+ assert ~pd.isna(ts.iat[0])\n \n \n DATE_BACKEND_TYPES = {\n"}
chore: ignore generated inventory files
0c82823904d6619c3a68c958d7399147e63f4dc5
chore
https://github.com/ibis-project/ibis/commit/0c82823904d6619c3a68c958d7399147e63f4dc5
ignore generated inventory files
{".gitignore": "@@ -20,4 +20,8 @@ objects.json\n *.ipynb\n *_files\n \n+# inventories\n+_inv\n+objects.txt\n+\n /.quarto/\n"}
build: small refactoring
5db54a8a4d39f9f7032778132a17cbc58659b9bd
build
https://github.com/tsparticles/tsparticles/commit/5db54a8a4d39f9f7032778132a17cbc58659b9bd
small refactoring
{"utils.ts": "@@ -77,30 +77,30 @@ export function addParticlesFromCanvasPixels(\n pixel = data.pixels[pixelPos.y][pixelPos.x],\n shouldCreateParticle = filter(pixel);\n \n- if (shouldCreateParticle) {\n- const pos = {\n- x: pixelPos.x * scale + positionOffset.x,\n- y: pixelPos.y * scale + positionOffset.y,\n- };\n+ if (!shouldCreateParticle) {\n+ continue;\n+ }\n \n- const pOptions: RecursivePartial<IParticlesOptions> = {};\n+ const pos = {\n+ x: pixelPos.x * scale + positionOffset.x,\n+ y: pixelPos.y * scale + positionOffset.y,\n+ }, pOptions: RecursivePartial<IParticlesOptions> = {};\n \n- if (override.color) {\n- pOptions.color = {\n- value: pixel,\n- };\n- }\n+ if (override.color) {\n+ pOptions.color = {\n+ value: pixel,\n+ };\n+ }\n \n- if (override.opacity) {\n- pOptions.opacity = {\n- value: pixel.a,\n- };\n- }\n+ if (override.opacity) {\n+ pOptions.opacity = {\n+ value: pixel.a,\n+ };\n+ }\n \n- container.particles.addParticle(pos, pOptions);\n+ container.particles.addParticle(pos, pOptions);\n \n- selectedPixels++;\n- }\n+ selectedPixels++;\n }\n }\n \n"}
test(duckdb): only fail when not in a `nix-shell` or using `nix build`
03073824787984ca81fc2f3281f86c0a44f05605
test
https://github.com/rohankumardubey/ibis/commit/03073824787984ca81fc2f3281f86c0a44f05605
only fail when not in a `nix-shell` or using `nix build`
{"test_array.py": "@@ -1,3 +1,5 @@\n+import os\n+\n import numpy as np\n import pandas.testing as tm\n import pytest\n@@ -15,10 +17,6 @@ except ImportError:\n \n pytestmark = [\n pytest.mark.never([\"sqlite\", \"mysql\"], reason=\"No array support\"),\n- pytest.mark.xfail(\n- duckdb and parse_version(duckdb.__version__) == parse_version(\"0.4.0\"),\n- reason=\"DuckDB array support is broken in 0.4.0\",\n- ),\n ]\n \n \n@@ -90,6 +88,20 @@ def test_np_array_literal(con):\n assert np.array_equal(result, arr)\n \n \n+duckdb_0_4_0 = pytest.mark.xfail(\n+ (\n+ # nixpkgs is patched to include the fix, so we pass these tests\n+ # inside the nix-shell or when they run under `nix build`\n+ (not any(key.startswith(\"NIX_\") for key in os.environ.keys()))\n+ and (\n+ parse_version(getattr(duckdb, \"__version__\", \"0.0.0\"))\n+ == parse_version(\"0.4.0\")\n+ )\n+ ),\n+ reason=\"DuckDB array support is broken in 0.4.0 without nix\",\n+)\n+\n+\n builtin_array = toolz.compose(\n # these will almost certainly never be supported\n pytest.mark.never(\n@@ -103,6 +115,7 @@ builtin_array = toolz.compose(\n [\"impala\"],\n reason=\"impala doesn't support array types\",\n ),\n+ duckdb_0_4_0,\n )\n \n unnest = toolz.compose(\n"}
fix(datafusion): always quote column names to prevent datafusion from normalizing case
310db2bf9416b73b1e741570350034bdd9d4337f
fix
https://github.com/rohankumardubey/ibis/commit/310db2bf9416b73b1e741570350034bdd9d4337f
always quote column names to prevent datafusion from normalizing case
{"compiler.py": "@@ -59,12 +59,8 @@ def cast(op):\n \n @translate.register(ops.TableColumn)\n def column(op):\n- table_op = op.table\n-\n- if hasattr(table_op, \"name\"):\n- return df.column(f'{table_op.name}.\"{op.name}\"')\n- else:\n- return df.column(op.name)\n+ id_parts = [getattr(op.table, \"name\", None), op.name]\n+ return df.column(\".\".join(f'\"{id}\"' for id in id_parts if id))\n \n \n @translate.register(ops.SortKey)\n", "test_export.py": "@@ -90,7 +90,6 @@ def test_column_to_pyarrow_array(limit, awards_players):\n \n \n @pytest.mark.parametrize(\"limit\", no_limit)\[email protected]_version(datafusion=[\"datafusion>=21\"])\n def test_empty_column_to_pyarrow(limit, awards_players):\n expr = awards_players.filter(awards_players.awardID == \"DEADBEEF\").awardID\n array = expr.to_pyarrow(limit=limit)\n"}
chore(deps): bump nixpkgs and poetry2nix (#9582)
b54bcdbc45da503812c1108e4e86dc71fc95bfa6
chore
https://github.com/ibis-project/ibis/commit/b54bcdbc45da503812c1108e4e86dc71fc95bfa6
bump nixpkgs and poetry2nix (#9582)
{"flake.lock": "@@ -61,11 +61,11 @@\n },\n \"nixpkgs\": {\n \"locked\": {\n- \"lastModified\": 1720067112,\n- \"narHash\": \"sha256-RqDbuJnwe29ffD8KE810dLxzCyaX5cvXks8TaJZK4H4=\",\n+ \"lastModified\": 1720933452,\n+ \"narHash\": \"sha256-HJhBuKZcmZSrAXukaIus9SsXahIVuo4/zdAhxSJrSQ4=\",\n \"owner\": \"NixOS\",\n \"repo\": \"nixpkgs\",\n- \"rev\": \"1af787b0e7fda63e5313fb1a6815019e0c4d6f9b\",\n+ \"rev\": \"3e7eb6610f8b7003416b2bffdd8865b85326db18\",\n \"type\": \"github\"\n },\n \"original\": {\n@@ -88,11 +88,11 @@\n \"treefmt-nix\": \"treefmt-nix\"\n },\n \"locked\": {\n- \"lastModified\": 1719850884,\n- \"narHash\": \"sha256-UU/lVTHFx0GpEkihoLJrMuM9DcuhZmNe3db45vshSyI=\",\n+ \"lastModified\": 1720947413,\n+ \"narHash\": \"sha256-ggBNou+oqHtRdilA6iFmPABF07zW8FWns5dWIjhBlMM=\",\n \"owner\": \"nix-community\",\n \"repo\": \"poetry2nix\",\n- \"rev\": \"42262f382c68afab1113ebd1911d0c93822d756e\",\n+ \"rev\": \"54083f90fdd45b20d092ba947c58f952f10bf501\",\n \"type\": \"github\"\n },\n \"original\": {\n"}
chore: remove broken link
90274cbc4576196540bae8f93e56b7bf7c4a58e2
chore
https://github.com/rohankumardubey/ibis/commit/90274cbc4576196540bae8f93e56b7bf7c4a58e2
remove broken link
{"input_output_penguins.qmd": "@@ -1,6 +1,6 @@\n ## Data platforms\n \n-You can connect Ibis to [any supported backend](#backends-supported) to read and write data in backend-native tables.\n+You can connect Ibis to any supported backend to read and write data in backend-native tables.\n \n ```{python}\n # | code-fold: true\n"}