Dataset schema (each record below lists these fields, in this order):

commit_message : string, length 3 to 2.32k (the group's commit messages, comma-joined)
diff : string, length 186 to 49.5k (JSON array of unified diffs, one per commit)
concern_count : int64, range 1 to 5
shas : string, length 44 to 220 (JSON array of commit SHAs)
types : string, length 6 to 45 (JSON array of conventional-commit types, one per commit)
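For illustration only, a minimal parsing sketch in Python. It assumes the diff, shas, and types fields are JSON-encoded arrays and that commit_message joins the individual messages with commas, as the records below suggest; the sample record is abbreviated (two of the four commits from the first record), and the naive comma split is lossy if a message itself contains a comma.

```python
import json

# Abbreviated sample record, shaped like the full records below.
record = {
    "commit_message": "added vue3 readme,update deps",
    "diff": '["diff --git a/core/main/README.md ...", "diff --git a/package.json ..."]',
    "concern_count": 2,
    "shas": '["e4c3e2cff769ce46d22d5c8f7dd527510443a8a7", '
            '"f46c6c6c26a14312aa05a77ff2a14aebd74e13ac"]',
    "types": '["docs", "build"]',
}

diffs = json.loads(record["diff"])     # one unified diff per commit
shas = json.loads(record["shas"])      # one 40-char commit SHA per commit
types = json.loads(record["types"])    # one conventional-commit type per commit
messages = record["commit_message"].split(",")  # naive split; breaks if a message contains ','

# In the records shown here, concern_count matches the number of commits,
# but that is an observation from this sample, not a documented guarantee.
assert record["concern_count"] == len(diffs) == len(shas) == len(types)

for sha, ctype, msg in zip(shas, types, messages):
    print(f"{sha[:8]} [{ctype}] {msg}")
```
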
commit_message: added vue3 readme,Deploy utilities from correct folder Signed-off-by: rjshrjndrn <[email protected]>,add test case with multiple partitions for message,update deps
diff:
["diff --git a/core/main/README.md b/core/main/README.md\nindex e5e4c93..e9cfda9 100644\n--- a/core/main/README.md\n+++ b/core/main/README.md\n@@ -217,7 +217,7 @@ You can find the instructions [here](https://github.com/matteobruni/tsparticles/\n \n You can find the instructions [here](https://github.com/matteobruni/tsparticles/blob/master/components/svelte/README.md)\n \n-### VueJS\n+### VueJS 2.x\n \n #### `particles.vue`\n \n@@ -225,6 +225,14 @@ You can find the instructions [here](https://github.com/matteobruni/tsparticles/\n \n You can find the instructions [here](https://github.com/matteobruni/tsparticles/blob/master/components/vue/README.md)\n \n+### VueJS 3.x\n+\n+#### `particles.vue3`\n+\n+[![npm](https://img.shields.io/npm/v/particles.vue3)](https://www.npmjs.com/package/particles.vue3) [![npm](https://img.shields.io/npm/dm/particles.vue3)](https://www.npmjs.com/package/particles.vue3)\n+\n+You can find the instructions [here](https://github.com/matteobruni/tsparticles/blob/master/components/vue3/README.md)\n+\n ---\n \n ## **_Demo / Generator_**\ndiff --git a/core/main/tsconfig.json b/core/main/tsconfig.json\nindex 7916bc5..72399c0 100644\n--- a/core/main/tsconfig.json\n+++ b/core/main/tsconfig.json\n@@ -107,10 +107,14 @@\n \"source\": \"../../components/react/README.md\"\n },\n {\n- \"title\": \"Vue\",\n+ \"title\": \"Vue 2.x\",\n \"source\": \"../../components/vue/README.md\"\n },\n {\n+ \"title\": \"Vue 3.x\",\n+ \"source\": \"../../components/vue3/README.md\"\n+ },\n+ {\n \"title\": \"Svelte\",\n \"source\": \"../../components/svelte/README.md\"\n },\n", "diff --git a/.github/workflows/utilities.yaml b/.github/workflows/utilities.yaml\nindex 92e130c..afbc850 100644\n--- a/.github/workflows/utilities.yaml\n+++ b/.github/workflows/utilities.yaml\n@@ -43,7 +43,7 @@ jobs:\n PUSH_IMAGE=1 bash build.sh\n - name: Deploy to kubernetes\n run: |\n- cd scripts/helm/\n+ cd scripts/helmcharts/\n sed -i \"s#openReplayContainerRegistry.*#openReplayContainerRegistry: \\\"${{ secrets.OSS_REGISTRY_URL }}\\\"#g\" vars.yaml\n sed -i \"s#minio_access_key.*#minio_access_key: \\\"${{ secrets.OSS_MINIO_ACCESS_KEY }}\\\" #g\" vars.yaml\n sed -i \"s#minio_secret_key.*#minio_secret_key: \\\"${{ secrets.OSS_MINIO_SECRET_KEY }}\\\" #g\" vars.yaml\n", "diff --git a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\nindex 693d1da..e3552d4 100644\n--- a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n+++ b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n@@ -74,7 +74,7 @@ public class SubscriptionCommandSender {\n new CloseWorkflowInstanceSubscriptionCommand();\n \n private final ClientTransport subscriptionClient;\n- private final IntArrayList partitionIds;\n+ private final IntArrayList partitionIds = new IntArrayList();\n \n private int partitionId;\n private TopologyPartitionListenerImpl partitionListener;\n@@ -82,7 +82,6 @@ public class SubscriptionCommandSender {\n public SubscriptionCommandSender(\n final ClusterCfg clusterCfg, final ClientTransport subscriptionClient) {\n this.subscriptionClient = subscriptionClient;\n- partitionIds = new IntArrayList();\n partitionIds.addAll(clusterCfg.getPartitionIds());\n }\n \n@@ -100,7 +99,8 @@ public class SubscriptionCommandSender {\n final DirectBuffer messageName,\n final DirectBuffer correlationKey) {\n \n- final int 
subscriptionPartitionId = getSubscriptionPartitionId(correlationKey);\n+ final int subscriptionPartitionId =\n+ SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n \n openMessageSubscriptionCommand.setSubscriptionPartitionId(subscriptionPartitionId);\n openMessageSubscriptionCommand.setWorkflowInstanceKey(workflowInstanceKey);\n@@ -111,14 +111,6 @@ public class SubscriptionCommandSender {\n return sendSubscriptionCommand(subscriptionPartitionId, openMessageSubscriptionCommand);\n }\n \n- private int getSubscriptionPartitionId(final DirectBuffer correlationKey) {\n- if (partitionIds == null) {\n- throw new IllegalStateException(\"no partition ids available\");\n- }\n-\n- return SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n- }\n-\n public boolean openWorkflowInstanceSubscription(\n final long workflowInstanceKey,\n final long elementInstanceKey,\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\nindex 4baed4f..838c9ca 100644\n--- a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n@@ -36,7 +36,6 @@ import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.BpmnModelInstance;\n import io.zeebe.protocol.clientapi.RecordType;\n import io.zeebe.protocol.clientapi.ValueType;\n-import io.zeebe.protocol.impl.SubscriptionUtil;\n import io.zeebe.protocol.intent.DeploymentIntent;\n import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n@@ -44,7 +43,6 @@ import io.zeebe.protocol.intent.WorkflowInstanceSubscriptionIntent;\n import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n import io.zeebe.test.util.record.RecordingExporter;\n-import io.zeebe.util.buffer.BufferUtil;\n import java.util.List;\n import java.util.stream.Collectors;\n import org.agrona.DirectBuffer;\n@@ -171,39 +169,6 @@ public class MessageCatchElementTest {\n }\n \n @Test\n- public void shouldOpenMessageSubscriptionsOnSamePartition() {\n- // given\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n-\n- final String correlationKey = \"order-123\";\n-\n- final PartitionTestClient workflowPartition = apiRule.partitionClient(partitionIds.get(0));\n- final PartitionTestClient subscriptionPartition =\n- apiRule.partitionClient(getPartitionId(correlationKey));\n-\n- testClient.deploy(CATCH_EVENT_WORKFLOW);\n-\n- // when\n- final long workflowInstanceKey1 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- final long workflowInstanceKey2 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- // then\n- final List<Record<MessageSubscriptionRecordValue>> subscriptions =\n- subscriptionPartition\n- .receiveMessageSubscriptions()\n- .withIntent(MessageSubscriptionIntent.OPENED)\n- .limit(2)\n- .collect(Collectors.toList());\n-\n- assertThat(subscriptions)\n- .extracting(s -> s.getValue().getWorkflowInstanceKey())\n- .contains(workflowInstanceKey1, workflowInstanceKey2);\n- }\n-\n- @Test\n public void shouldOpenWorkflowInstanceSubscription() {\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", 
\"order-123\"));\n@@ -352,10 +317,4 @@ public class MessageCatchElementTest {\n .exists())\n .isTrue();\n }\n-\n- private int getPartitionId(final String correlationKey) {\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n- return SubscriptionUtil.getSubscriptionPartitionId(\n- BufferUtil.wrapString(correlationKey), partitionIds.size());\n- }\n }\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 0000000..cf8261a\n--- /dev/null\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,134 @@\n+/*\n+ * Zeebe Broker Core\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * This program is free software: you can redistribute it and/or modify\n+ * it under the terms of the GNU Affero General Public License as published by\n+ * the Free Software Foundation, either version 3 of the License, or\n+ * (at your option) any later version.\n+ *\n+ * This program is distributed in the hope that it will be useful,\n+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\n+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n+ * GNU Affero General Public License for more details.\n+ *\n+ * You should have received a copy of the GNU Affero General Public License\n+ * along with this program. If not, see <http://www.gnu.org/licenses/>.\n+ */\n+package io.zeebe.broker.workflow.message;\n+\n+import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static io.zeebe.test.util.MsgPackUtil.asMsgPack;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.impl.SubscriptionUtil;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n+import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import io.zeebe.util.buffer.BufferUtil;\n+import java.util.List;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = \"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"receive-message\")\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+\n+ public ClientApiRule apiRule = new ClientApiRule(brokerRule::getClientAddress);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(apiRule);\n+\n+ private PartitionTestClient testClient;\n+\n+ @Before\n+ public void init() 
{\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_0)).isEqualTo(0);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_1)).isEqualTo(1);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_2)).isEqualTo(2);\n+\n+ testClient = apiRule.partitionClient();\n+\n+ testClient.deploy(WORKFLOW);\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ apiRule\n+ .partitionClient(0)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_0, asMsgPack(\"p\", \"p0\"));\n+ apiRule\n+ .partitionClient(1)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_1, asMsgPack(\"p\", \"p1\"));\n+ apiRule\n+ .partitionClient(2)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_2, asMsgPack(\"p\", \"p2\"));\n+\n+ // when\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ private int getPartitionId(final String correlationKey) {\n+ final List<Integer> partitionIds = apiRule.getPartitionIds();\n+ return SubscriptionUtil.getSubscriptionPartitionId(\n+ BufferUtil.wrapString(correlationKey), partitionIds.size());\n+ }\n+}\ndiff --git a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\nindex dac11a2..e2b8397 100644\n--- a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n+++ b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n@@ -329,6 +329,7 @@ public class PartitionTestClient {\n final String messageName, final String correlationKey, final byte[] payload, final long ttl) {\n return apiRule\n .createCmdRequest()\n+ .partitionId(partitionId)\n .type(ValueType.MESSAGE, MessageIntent.PUBLISH)\n .command()\n .put(\"name\", messageName)\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\nindex 9a122d9..b7db67e 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n+++ 
b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n@@ -619,14 +619,9 @@ public class BrokerReprocessingTest {\n }\n \n @Test\n- public void shouldCorrelateMessageAfterRestartIfEnteredBeforeA() throws Exception {\n+ public void shouldCorrelateMessageAfterRestartIfEnteredBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n final long workflowInstanceKey =\n startWorkflowInstance(PROCESS_ID, singletonMap(\"orderId\", \"order-123\"))\n@@ -658,12 +653,7 @@ public class BrokerReprocessingTest {\n @Test\n public void shouldCorrelateMessageAfterRestartIfPublishedBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n publishMessage(\"order canceled\", \"order-123\", singletonMap(\"foo\", \"bar\"));\n reprocessingTrigger.accept(this);\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\ndeleted file mode 100644\nindex c6a05fb..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\n+++ /dev/null\n@@ -1,176 +0,0 @@\n-/*\n- * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-import org.junit.runner.RunWith;\n-import org.junit.runners.Parameterized;\n-import org.junit.runners.Parameterized.Parameter;\n-import org.junit.runners.Parameterized.Parameters;\n-\n-@RunWith(Parameterized.class)\n-public class MessageCorrelationTest {\n-\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private static final BpmnModelInstance CATCH_EVENT_WORKFLOW =\n- 
Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- private static final BpmnModelInstance RECEIVE_TASK_WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .receiveTask(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- @Parameter(0)\n- public String elementType;\n-\n- @Parameter(1)\n- public BpmnModelInstance workflow;\n-\n- @Parameters(name = \"{0}\")\n- public static final Object[][] parameters() {\n- return new Object[][] {\n- {\"intermediate message catch event\", CATCH_EVENT_WORKFLOW},\n- {\"receive task\", RECEIVE_TASK_WORKFLOW}\n- };\n- }\n-\n- @Before\n- public void init() {\n- final DeploymentEvent deploymentEvent =\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(workflow, \"wf.bpmn\")\n- .send()\n- .join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfEnteredBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"receive-message\");\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfPublishedBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageAndMergePayload() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .payload(Collections.singletonMap(\"foo\", \"bar\"))\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n-\n- assertElementCompleted(\n- \"wf\",\n- \"receive-message\",\n- (catchEventOccurredEvent) ->\n- assertThat(catchEventOccurredEvent.getPayloadAsMap())\n- .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\ndeleted file mode 100644\nindex 7845eec..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\n+++ /dev/null\n@@ -1,234 +0,0 @@\n-/*\n- * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the 
\"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.assertThatThrownBy;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.ZeebeFuture;\n-import io.zeebe.client.api.clients.WorkflowClient;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.client.api.events.WorkflowInstanceEvent;\n-import io.zeebe.client.cmd.ClientException;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import java.time.Duration;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-\n-public class PublishMessageTest {\n-\n- private static final BpmnModelInstance WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"catch-event\")\n- .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .endEvent()\n- .done();\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private WorkflowClient workflowClient;\n-\n- @Before\n- public void init() {\n-\n- workflowClient = clientRule.getClient().workflowClient();\n-\n- final DeploymentEvent deploymentEvent =\n- workflowClient.newDeployCommand().addWorkflowModel(WORKFLOW, \"wf.bpmn\").send().join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageToAllSubscriptions() {\n- // given\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageWithZeroTTL() {\n- // given\n- workflowClient\n- 
.newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"catch-event\");\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ZERO)\n- .send()\n- .join();\n-\n- // then\n- assertElementCompleted(\"wf\", \"catch-event\");\n- }\n-\n- @Test\n- public void shouldNotCorrelateMessageAfterTTL() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ZERO)\n- .payload(Collections.singletonMap(\"msg\", \"failure\"))\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ofMinutes(1))\n- .payload(Collections.singletonMap(\"msg\", \"expected\"))\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n-\n- assertElementCompleted(\n- \"wf\",\n- \"catch-event\",\n- (catchEventOccurred) ->\n- assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", \"expected\")));\n- }\n-\n- @Test\n- public void shouldCorrelateMessageOnDifferentPartitions() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-124\")\n- .send()\n- .join();\n-\n- // when\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-124\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldRejectMessageWithSameId() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send()\n- .join();\n-\n- // when\n- final ZeebeFuture<Void> future =\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send();\n-\n- // then\n- assertThatThrownBy(future::join)\n- .isInstanceOf(ClientException.class)\n- .hasMessageContaining(\"message with id 'foo' is already published\");\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 0000000..0e37c95\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,196 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, 
Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.intent.MessageIntent;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import java.util.Collections;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = \"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent()\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldPublishMessageOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ publishMessage(CORRELATION_KEY_PARTITION_0, 
Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+ });\n+\n+ // then\n+ assertThat(RecordingExporter.messageRecords(MessageIntent.PUBLISHED).limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ publishMessage(CORRELATION_KEY_PARTITION_0, Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+\n+ // when\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnSamePartitionsAfterRestart() {\n+ // given\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(15)\n+ .exists())\n+ .isTrue();\n+\n+ // when\n+ brokerRule.stopBroker();\n+ brokerRule.startBroker();\n+\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ private void createWorkflowInstance(Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+\n+ private void publishMessage(String correlationKey, Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"message\")\n+ .correlationKey(correlationKey)\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\nnew file mode 100644\nindex 
0000000..3b08572\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\n@@ -0,0 +1,198 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.assertThatThrownBy;\n+import static org.assertj.core.api.Assertions.entry;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.ZeebeFuture;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.client.cmd.ClientException;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import java.time.Duration;\n+import java.util.Collections;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationTest {\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"catch-event\")\n+ .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n+ .endEvent()\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessage() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .payload(Collections.singletonMap(\"foo\", \"bar\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertWorkflowInstanceCompleted(PROCESS_ID);\n+\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurredEvent) ->\n+ assertThat(catchEventOccurredEvent.getPayloadAsMap())\n+ .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n+ }\n+\n+ @Test\n+ public void 
shouldCorrelateMessageWithZeroTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ assertElementActivated(\"catch-event\");\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(PROCESS_ID, \"catch-event\");\n+ }\n+\n+ @Test\n+ public void shouldNotCorrelateMessageAfterTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .payload(Collections.singletonMap(\"msg\", \"failure\"))\n+ .send()\n+ .join();\n+\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ofMinutes(1))\n+ .payload(Collections.singletonMap(\"msg\", \"expected\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurred) ->\n+ assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", \"expected\")));\n+ }\n+\n+ @Test\n+ public void shouldRejectMessageWithSameId() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send()\n+ .join();\n+\n+ // when\n+ final ZeebeFuture<Void> future =\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send();\n+\n+ // then\n+ assertThatThrownBy(future::join)\n+ .isInstanceOf(ClientException.class)\n+ .hasMessageContaining(\"message with id 'foo' is already published\");\n+ }\n+}\n", "diff --git a/package.json b/package.json\nindex 3696132..ba002d2 100755\n--- a/package.json\n+++ b/package.json\n@@ -34,7 +34,6 @@\n \"koa-router\": \"~7.4.0\",\n \"koa-session\": \"~5.10.1\",\n \"koa-static\": \"~5.0.0\",\n- \"koa2-ratelimit\": \"~0.8.0\",\n \"koa2-swagger-ui\": \"~2.13.2\",\n \"node-fetch\": \"^2.5.0\",\n \"passport-github\": \"~1.1.0\",\ndiff --git a/src/index.ts b/src/index.ts\nindex 847e8aa..8c9baff 100755\n--- a/src/index.ts\n+++ b/src/index.ts\n@@ -4,7 +4,6 @@ import * as Koa from 'koa';\n import * as bodyParser from 'koa-bodyparser';\n import * as session from 'koa-session';\n import * as serve from 'koa-static';\n-// import { RateLimit } from 'koa2-ratelimit';\n import { Server } from 'net';\n \n import { setupPassport } from './auth';\ndiff --git a/src/typings/koa-router.d.ts b/src/typings/koa-router.d.ts\ndeleted file mode 100644\nindex f891ed8..0000000\n--- a/src/typings/koa-router.d.ts\n+++ /dev/null\n@@ -1,16 +0,0 @@\n-import * as koa from 'koa-router';\n-import { IUserSession } from '../models';\n-import { ILogger } from '../logger';\n-\n-declare module 'koa-router' {\n- export interface IRouterContext {\n- state: { user: IUserSession | undefined };\n- logger: ILogger;\n- }\n-}\n-\n-declare module 'koa' {\n- export interface Context {\n- 
logger: ILogger;\n- }\n-}\ndiff --git a/src/typings/koa2-ratelimit/index.d.ts b/src/typings/koa2-ratelimit/index.d.ts\ndeleted file mode 100644\nindex cc73228..0000000\n--- a/src/typings/koa2-ratelimit/index.d.ts\n+++ /dev/null\n@@ -1,13 +0,0 @@\n-declare module 'koa2-ratelimit' {\n- import * as Koa from 'koa';\n- interface Props {\n- interval: { min?: number; hour?: number }; // 15 minutes = 15*60*1000\n- max: number;\n- message?: string;\n- prefixKey?: string;\n- getUserId?: (ctx: Koa.Context) => Promise<string>;\n- }\n- export namespace RateLimit {\n- function middleware(options: Props): Koa.Middleware;\n- }\n-}\ndiff --git a/src/typings/mockingoose/index.d.ts b/src/typings/mockingoose/index.d.ts\ndeleted file mode 100644\nindex 757c4e4..0000000\n--- a/src/typings/mockingoose/index.d.ts\n+++ /dev/null\n@@ -1,5 +0,0 @@\n-declare module 'mockingoose' {\n- const mockingoose: any;\n-\n- export default mockingoose;\n-}\n"]
concern_count: 4
["e4c3e2cff769ce46d22d5c8f7dd527510443a8a7", "2ebf04099353ef70395b8c8f5e130f70e1ed0814", "2d416be63eeec9e7fdb90a62c40c8ad8f0672efa", "f46c6c6c26a14312aa05a77ff2a14aebd74e13ac"]
["docs", "ci", "test", "build"]
commit_message: remove unnecessary spotless definition It receives this already from the parent pom.,add more tests for Utils.lookupPathFromDecorator,rename step,fix monorepo.dir prop Signed-off-by: Carlos Alexandro Becker <[email protected]>
diff:
["diff --git a/benchmarks/project/pom.xml b/benchmarks/project/pom.xml\nindex 62030b6..ab87dea 100644\n--- a/benchmarks/project/pom.xml\n+++ b/benchmarks/project/pom.xml\n@@ -123,11 +123,6 @@\n </plugin>\n \n <plugin>\n- <groupId>com.diffplug.spotless</groupId>\n- <artifactId>spotless-maven-plugin</artifactId>\n- </plugin>\n-\n- <plugin>\n <groupId>org.apache.maven.plugins</groupId>\n <artifactId>maven-shade-plugin</artifactId>\n <executions>\n", "diff --git a/lib/utils/Utils.ts b/lib/utils/Utils.ts\nindex 6de6e05..b03b3e9 100644\n--- a/lib/utils/Utils.ts\n+++ b/lib/utils/Utils.ts\n@@ -338,15 +338,8 @@ export class Utils {\n line++;\n }\n \n- if (stack[line].match(/\\(.+\\)/i)) {\n- meta.path = Utils.normalizePath(\n- stack[line].match(/\\((.*):\\d+:\\d+\\)/)![1],\n- );\n- } else {\n- meta.path = Utils.normalizePath(\n- stack[line].match(/at\\s*(.*):\\d+:\\d+$/)![1],\n- );\n- }\n+ const re = stack[line].match(/\\(.+\\)/i) ? /\\((.*):\\d+:\\d+\\)/ : /at\\s*(.*):\\d+:\\d+$/;\n+ meta.path = Utils.normalizePath(stack[line].match(re)![1]);\n \n return meta.path;\n }\ndiff --git a/tests/Utils.test.ts b/tests/Utils.test.ts\nindex c3e9aa1..4d2a209 100644\n--- a/tests/Utils.test.ts\n+++ b/tests/Utils.test.ts\n@@ -256,7 +256,7 @@ describe('Utils', () => {\n ' at Object.__decorate (/usr/local/var/www/my-project/node_modules/tslib/tslib.js:92:96)',\n ' at Object.<anonymous> (/usr/local/var/www/my-project/dist/entities/Customer.js:20:9)',\n ' at Module._compile (internal/modules/cjs/loader.js:776:30)',\n- ' at Object.Module._extensions..js (internal/modules/cjs/loader.js:787:10)',\n+ ' at Object.Module._extensions.js (internal/modules/cjs/loader.js:787:10)',\n ' at Module.load (internal/modules/cjs/loader.js:643:32)',\n ' at Function.Module._load (internal/modules/cjs/loader.js:556:12)',\n ];\n@@ -272,10 +272,25 @@ describe('Utils', () => {\n ' at Object.<anonymous> (/usr/local/var/www/my-project/src/entities/Customer.ts:9:3)',\n ' at Module._compile (internal/modules/cjs/loader.js:776:30)',\n ' at Module.m._compile (/usr/local/var/www/my-project/node_modules/ts-node/src/index.ts:473:23)',\n- ' at Module._extensions..js (internal/modules/cjs/loader.js:787:10)',\n+ ' at Module._extensions.js (internal/modules/cjs/loader.js:787:10)',\n ' at Object.require.extensions.<computed> [as .ts] (/usr/local/var/www/my-project/node_modules/ts-node/src/index.ts:476:12)',\n ];\n expect(Utils.lookupPathFromDecorator({} as any, stack2)).toBe('/usr/local/var/www/my-project/src/entities/Customer.ts');\n+\n+ // no parens\n+ const stack3 = [\n+ ' at Function.lookupPathFromDecorator (/usr/local/var/www/my-project/node_modules/mikro-orm/dist/utils/Utils.js:170:23)',\n+ ' at /usr/local/var/www/my-project/node_modules/mikro-orm/dist/decorators/PrimaryKey.js:12:23',\n+ ' at DecorateProperty (/usr/local/var/www/my-project/node_modules/reflect-metadata/Reflect.js:553:33)',\n+ ' at Object.decorate (/usr/local/var/www/my-project/node_modules/reflect-metadata/Reflect.js:123:24)',\n+ ' at Object.__decorate (/usr/local/var/www/my-project/node_modules/tslib/tslib.js:92:96)',\n+ ' at /usr/local/var/www/my-project/dist/entities/Customer.js:20:9',\n+ ' at Module._compile (internal/modules/cjs/loader.js:776:30)',\n+ ' at Object.Module._extensions.js (internal/modules/cjs/loader.js:787:10)',\n+ ' at Module.load (internal/modules/cjs/loader.js:643:32)',\n+ ' at Function.Module._load (internal/modules/cjs/loader.js:556:12)',\n+ ];\n+ expect(Utils.lookupPathFromDecorator({} as any, 
stack3)).toBe('/usr/local/var/www/my-project/dist/entities/Customer.js');\n });\n \n test('lookup path from decorator on windows', () => {\n@@ -287,7 +302,7 @@ describe('Utils', () => {\n ' at Object.<anonymous> (C:\\\\www\\\\my-project\\\\src\\\\entities\\\\Customer.ts:7:5)',\n ' at Module._compile (internal/modules/cjs/loader.js:936:30)',\n ' at Module.m._compile (C:\\\\www\\\\my-project\\\\node_modules\\\\ts-node\\\\src\\\\index.ts:493:23)',\n- ' at Module._extensions..js (internal/modules/cjs/loader.js:947:10)',\n+ ' at Module._extensions.js (internal/modules/cjs/loader.js:947:10)',\n ' at Object.require.extensions.<computed> [as .ts] (C:\\\\www\\\\my-project\\\\node_modules\\\\ts-node\\\\src\\\\index.ts:496:12)',\n ' at Module.load (internal/modules/cjs/loader.js:790:32)',\n ' at Function.Module._load (internal/modules/cjs/loader.js:703:12)',\n", "diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex e81d897..5c3ee6b 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -45,7 +45,7 @@ jobs:\n - name: Install dependencies\n run: pnpm install\n \n- - name: Typecheck\n+ - name: Build (stub)\n run: pnpm build:stub\n \n - name: Typecheck\n", "diff --git a/www/docs/customization/monorepo.md b/www/docs/customization/monorepo.md\nindex 6d0e857..e45490f 100644\n--- a/www/docs/customization/monorepo.md\n+++ b/www/docs/customization/monorepo.md\n@@ -18,7 +18,7 @@ project_name: subproj1\n \n monorepo:\n tag_prefix: subproject1/\n- folder: subproj1\n+ dir: subproj1\n ```\n \n Then, you can release with (from the project's root directory):\n@@ -30,11 +30,11 @@ goreleaser release --rm-dist -f ./subproj1/.goreleaser.yml\n Then, the following is different from a \"regular\" run:\n \n - GoReleaser will then look if current commit has a tag prefixed with `subproject1`, and also the previous tag with the same prefix;\n-- Changelog will include only commits that contain changes to files within the `subproj1` folder;\n+- Changelog will include only commits that contain changes to files within the `subproj1` directory;\n - Release name gets prefixed with `{{ .ProjectName }} ` if empty;\n-- All build's `dir` setting get set to `monorepo.folder` if empty;\n+- All build's `dir` setting get set to `monorepo.dir` if empty;\n - if yours is not, you might want to change that manually;\n-- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.folder`;\n+- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.dir`;\n - On templates, `{{.PrefixedTag}}` will be `monorepo.prefix/tag` (aka the actual tag name), and `{{.Tag}}` has the prefix stripped;\n \n The rest of the release process should work as usual.\n"]
concern_count: 4
["7f9721dc9bbf66a3712d59352f64ca089da139f0", "c5e86dbc00a13a355bffadeb2db197e2fea5640f", "34875bc0e59b43d9041903101c823d25ec194a21", "9ed3c0c4a72af977fc9150512fb6538f20a94b22"]
["build", "test", "ci", "docs"]
commit_message: add Expr.equals benchmark,correct width when --no-quotes is used,coordinator accepts a request transformer instead of a list of operations The request transformer can generate the operations from the current topology. This helps to - ensure that the operations are generated based on the latest topology. When concurrent changes happens, coordinator can detect it. Previously it was unclear because by the time handle apply operations, the cluster topology might have changed. - return the simulated final topology as part of the result,README
diff:
["diff --git a/ibis/tests/benchmarks/test_benchmarks.py b/ibis/tests/benchmarks/test_benchmarks.py\nindex 78305bb..9c7e6d7 100644\n--- a/ibis/tests/benchmarks/test_benchmarks.py\n+++ b/ibis/tests/benchmarks/test_benchmarks.py\n@@ -1,3 +1,4 @@\n+import copy\n import functools\n import itertools\n import string\n@@ -340,8 +341,9 @@ def test_execute(benchmark, expression_fn, pt):\n benchmark(expr.execute)\n \n \n-def test_repr_tpc_h02(benchmark):\n- part = ibis.table(\[email protected]\n+def part():\n+ return ibis.table(\n dict(\n p_partkey=\"int64\",\n p_size=\"int64\",\n@@ -350,7 +352,11 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"part\",\n )\n- supplier = ibis.table(\n+\n+\[email protected]\n+def supplier():\n+ return ibis.table(\n dict(\n s_suppkey=\"int64\",\n s_nationkey=\"int64\",\n@@ -362,7 +368,11 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"supplier\",\n )\n- partsupp = ibis.table(\n+\n+\[email protected]\n+def partsupp():\n+ return ibis.table(\n dict(\n ps_partkey=\"int64\",\n ps_suppkey=\"int64\",\n@@ -370,14 +380,25 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"partsupp\",\n )\n- nation = ibis.table(\n+\n+\[email protected]\n+def nation():\n+ return ibis.table(\n dict(n_nationkey=\"int64\", n_regionkey=\"int64\", n_name=\"string\"),\n name=\"nation\",\n )\n- region = ibis.table(\n+\n+\[email protected]\n+def region():\n+ return ibis.table(\n dict(r_regionkey=\"int64\", r_name=\"string\"), name=\"region\"\n )\n \n+\[email protected]\n+def tpc_h02(part, supplier, partsupp, nation, region):\n REGION = \"EUROPE\"\n SIZE = 25\n TYPE = \"BRASS\"\n@@ -420,7 +441,7 @@ def test_repr_tpc_h02(benchmark):\n ]\n )\n \n- expr = q.sort_by(\n+ return q.sort_by(\n [\n ibis.desc(q.s_acctbal),\n q.n_name,\n@@ -429,7 +450,9 @@ def test_repr_tpc_h02(benchmark):\n ]\n ).limit(100)\n \n- benchmark(repr, expr)\n+\n+def test_repr_tpc_h02(benchmark, tpc_h02):\n+ benchmark(repr, tpc_h02)\n \n \n def test_repr_huge_union(benchmark):\n@@ -478,3 +501,7 @@ def test_complex_datatype_builtins(benchmark, func):\n )\n )\n benchmark(func, datatype)\n+\n+\n+def test_large_expr_equals(benchmark, tpc_h02):\n+ benchmark(ir.Expr.equals, tpc_h02, copy.deepcopy(tpc_h02))\n", "diff --git a/src/output/grid.rs b/src/output/grid.rs\nindex 37f6c57..ce989e5 100644\n--- a/src/output/grid.rs\n+++ b/src/output/grid.rs\n@@ -8,6 +8,8 @@ use crate::output::file_name::{Classify, Options as FileStyle};\n use crate::output::file_name::{EmbedHyperlinks, ShowIcons};\n use crate::theme::Theme;\n \n+use super::file_name::QuoteStyle;\n+\n #[derive(PartialEq, Eq, Debug, Copy, Clone)]\n pub struct Options {\n pub across: bool,\n@@ -55,27 +57,34 @@ impl<'a> Render<'a> {\n } else {\n 0\n };\n-\n- let space_filename_offset = if file.name.contains(' ') || file.name.contains('\\'') {\n- 2\n- } else {\n- 0\n+ let space_filename_offset = match self.file_style.quote_style {\n+ QuoteStyle::QuoteSpaces if file.name.contains(' ') => 2,\n+ QuoteStyle::NoQuotes => 0,\n+ _ => 0, // Default case\n };\n-\n let contents = filename.paint();\n- #[rustfmt::skip]\n let width = match (\n filename.options.embed_hyperlinks,\n filename.options.show_icons,\n ) {\n- ( EmbedHyperlinks::On, ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing) )\n- => filename.bare_width() + classification_width + 1 + (spacing as usize) + space_filename_offset,\n- ( EmbedHyperlinks::On, ShowIcons::Never )\n- => filename.bare_width() + classification_width + space_filename_offset,\n- ( EmbedHyperlinks::Off, ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing) 
)\n- => filename.bare_width() + 1 + (spacing as usize) + space_filename_offset,\n- ( EmbedHyperlinks::Off, _ )\n- => *contents.width(),\n+ (\n+ EmbedHyperlinks::On,\n+ ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing),\n+ ) => {\n+ filename.bare_width()\n+ + classification_width\n+ + 1\n+ + (spacing as usize)\n+ + space_filename_offset\n+ }\n+ (EmbedHyperlinks::On, ShowIcons::Never) => {\n+ filename.bare_width() + classification_width + space_filename_offset\n+ }\n+ (\n+ EmbedHyperlinks::Off,\n+ ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing),\n+ ) => filename.bare_width() + 1 + (spacing as usize) + space_filename_offset,\n+ (EmbedHyperlinks::Off, _) => *contents.width(),\n };\n \n grid.add(tg::Cell {\n", "diff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\nindex 8bb5c3d..f8f5e24 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n@@ -10,6 +10,7 @@ package io.camunda.zeebe.topology.changes;\n import io.camunda.zeebe.scheduler.future.ActorFuture;\n import io.camunda.zeebe.topology.state.ClusterTopology;\n import io.camunda.zeebe.topology.state.TopologyChangeOperation;\n+import io.camunda.zeebe.util.Either;\n import java.util.List;\n \n public interface TopologyChangeCoordinator {\n@@ -39,4 +40,16 @@ public interface TopologyChangeCoordinator {\n ActorFuture<Boolean> hasCompletedChanges(final long version);\n \n ActorFuture<ClusterTopology> getCurrentTopology();\n+\n+ ActorFuture<TopologyChangeResult> applyOperations(TopologyChangeRequest request);\n+\n+ record TopologyChangeResult(\n+ ClusterTopology currentTopology,\n+ ClusterTopology finalTopology,\n+ List<TopologyChangeOperation> operations) {}\n+\n+ interface TopologyChangeRequest {\n+ Either<Exception, List<TopologyChangeOperation>> operations(\n+ final ClusterTopology currentTopology);\n+ }\n }\ndiff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\nindex 13ec754..877fc3c 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n@@ -103,6 +103,62 @@ public class TopologyChangeCoordinatorImpl implements TopologyChangeCoordinator \n return clusterTopologyManager.getClusterTopology();\n }\n \n+ @Override\n+ public ActorFuture<TopologyChangeResult> applyOperations(final TopologyChangeRequest request) {\n+ final ActorFuture<TopologyChangeResult> future = executor.createFuture();\n+ clusterTopologyManager\n+ .getClusterTopology()\n+ .onComplete(\n+ (currentClusterTopology, errorOnGettingTopology) -> {\n+ if (errorOnGettingTopology != null) {\n+ future.completeExceptionally(errorOnGettingTopology);\n+ return;\n+ }\n+\n+ final var operationsEither = request.operations(currentClusterTopology);\n+ if (operationsEither.isLeft()) {\n+ future.completeExceptionally(operationsEither.getLeft());\n+ return;\n+ }\n+ final var operations = operationsEither.get();\n+ if (operations.isEmpty()) {\n+ // No operations to apply\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, currentClusterTopology, operations));\n+ return;\n+ }\n+\n+ final 
ActorFuture<ClusterTopology> validation =\n+ validateTopologyChangeRequest(currentClusterTopology, operations);\n+\n+ validation.onComplete(\n+ (simulatedFinalTopology, validationError) -> {\n+ if (validationError != null) {\n+ future.completeExceptionally(validationError);\n+ return;\n+ }\n+\n+ // if the validation was successful, apply the changes\n+ final ActorFuture<ClusterTopology> applyFuture = executor.createFuture();\n+ applyTopologyChange(\n+ operations, currentClusterTopology, simulatedFinalTopology, applyFuture);\n+\n+ applyFuture.onComplete(\n+ (ignore, error) -> {\n+ if (error == null) {\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, simulatedFinalTopology, operations));\n+ } else {\n+ future.completeExceptionally(error);\n+ }\n+ });\n+ });\n+ });\n+ return future;\n+ }\n+\n private ActorFuture<ClusterTopology> validateTopologyChangeRequest(\n final ClusterTopology currentClusterTopology,\n final List<TopologyChangeOperation> operations) {\n", "diff --git a/README.md b/README.md\nindex 11a24b3..56e8d2a 100644\n--- a/README.md\n+++ b/README.md\n@@ -16,7 +16,9 @@ oclif: create your own CLI\n \n # Description\n \n-This is a framework for building CLIs in Node.js. This framework was built out of the [Heroku CLI](https://cli.heroku.com) but generalized to build any custom CLI. It's designed both for simple CLIs that can be just a single file with a few flag options, or for very complex CLIs that have many commands (like git or heroku). Most CLI tools in Node are simple flag parsers but oclif is much more than that\u2014though without the overhead of making simple CLIs quick to write with minimal boilerplate.\n+This is a framework for building CLIs in Node.js. This framework was built out of the [Heroku CLI](https://cli.heroku.com) but generalized to build any custom CLI. It's designed both for simple CLIs that can be just a single file with a few flag options, or for very complex CLIs that have many commands (like git or heroku).\n+\n+Most CLI tools for Node are simple flag parsers but oclif is much more than that\u2014though without the overhead of making simple CLIs quick to write with minimal boilerplate.\n \n # Features\n \n"]
4
["b700285c1f27588922d9c56527cee721bb884682", "61eaa2d0cca9bd27d6c5f0a8f9b34200b77fdbb0", "dec860436916ef216998f80f8b2f9c39d00c064d", "363f84c7da411468b4103da8e0b39ca48cfd8327"]
["test", "fix", "feat", "docs"]
Use arm64v8 postfix for Cube Store :dev build,fixed tick interval,simplify statement,Introduce timediff fn (stub)
["diff --git a/.github/workflows/rust-cubestore-master.yml b/.github/workflows/rust-cubestore-master.yml\nindex 4a84984..bb07cd7 100644\n--- a/.github/workflows/rust-cubestore-master.yml\n+++ b/.github/workflows/rust-cubestore-master.yml\n@@ -115,9 +115,9 @@ jobs:\n if [[ $VERSION =~ ^v[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$ ]]; then\n MINOR=${VERSION%.*}\n MAJOR=${MINOR%.*}\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR},${DOCKER_IMAGE}:latest\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR}\"\n elif [ \"${{ github.event_name }}\" = \"push\" ]; then\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}${{ matrix.postfix }}\"\n fi\n \n echo ::set-output name=version::${VERSION}\n", "diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go\nindex 4a5e764..35c3ff2 100644\n--- a/backend/services/integrations/main.go\n+++ b/backend/services/integrations/main.go\n@@ -54,7 +54,7 @@ func main() {\n \tsigchan := make(chan os.Signal, 1)\n \tsignal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)\n \n-\ttick := time.Tick(intervals.INTEGRATIONS_REQUEST_INTERVAL)\n+\ttick := time.Tick(intervals.INTEGRATIONS_REQUEST_INTERVAL * time.Millisecond)\n \n \tlog.Printf(\"Integration service started\\n\")\n \tmanager.RequestAll()\n@@ -66,7 +66,7 @@ func main() {\n \t\t\tpg.Close()\n \t\t\tos.Exit(0)\n \t\tcase <-tick:\n-\t\t\t// log.Printf(\"Requesting all...\\n\")\n+\t\t\tlog.Printf(\"Requesting all...\\n\")\n \t\t\tmanager.RequestAll()\n \t\tcase event := <-manager.Events:\n \t\t\t// log.Printf(\"New integration event: %v\\n\", *event.RawErrorEvent)\n", "diff --git a/src/Object/Merge.ts b/src/Object/Merge.ts\nindex 1f48efb..06caad1 100644\n--- a/src/Object/Merge.ts\n+++ b/src/Object/Merge.ts\n@@ -96,9 +96,11 @@ type ChooseMergeDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _MergeDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? MergeProp<O, O1, K, OOK, style>\n- : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? MergeProp<O, O1, K, OOK, style>\n+ : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\ndiff --git a/src/Object/Patch.ts b/src/Object/Patch.ts\nindex 2d73784..2c8bd42 100644\n--- a/src/Object/Patch.ts\n+++ b/src/Object/Patch.ts\n@@ -89,9 +89,11 @@ type ChoosePatchDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _PatchDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? PatchProp<O, O1, K, OOK>\n- : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? 
PatchProp<O, O1, K, OOK>\n+ : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\n", "diff --git a/rust/Cargo.lock b/rust/Cargo.lock\nindex b42616f..4795eb6 100644\n--- a/rust/Cargo.lock\n+++ b/rust/Cargo.lock\n@@ -1287,7 +1287,7 @@ dependencies = [\n [[package]]\n name = \"datafusion\"\n version = \"5.1.0\"\n-source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=8df4132b83d896a0d3db5c82a4eaaa3eaa285d15#8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\"\n+source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=868f3c4de13d13cda84cee33475b9782b94fa60c#868f3c4de13d13cda84cee33475b9782b94fa60c\"\n dependencies = [\n \"ahash 0.7.4\",\n \"arrow 6.0.0\",\ndiff --git a/rust/cubesql/Cargo.toml b/rust/cubesql/Cargo.toml\nindex 3cb386a..9aef494 100644\n--- a/rust/cubesql/Cargo.toml\n+++ b/rust/cubesql/Cargo.toml\n@@ -9,7 +9,7 @@ documentation = \"https://cube.dev/docs\"\n homepage = \"https://cube.dev\"\n \n [dependencies]\n-datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\", default-features = false, features = [\"unicode_expressions\"] }\n+datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"868f3c4de13d13cda84cee33475b9782b94fa60c\", default-features = false, features = [\"unicode_expressions\"] }\n anyhow = \"1.0\"\n thiserror = \"1.0\"\n cubeclient = { path = \"../cubeclient\" }\ndiff --git a/rust/cubesql/src/compile/engine/df/intervals.rs b/rust/cubesql/src/compile/engine/df/intervals.rs\nnew file mode 100644\nindex 0000000..9e6cb7e\n--- /dev/null\n+++ b/rust/cubesql/src/compile/engine/df/intervals.rs\n@@ -0,0 +1,51 @@\n+#[macro_export]\n+macro_rules! make_string_interval_year_month {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let interval = $array.value($row) as f64;\n+ let years = (interval / 12_f64).floor();\n+ let month = interval - (years * 12_f64);\n+\n+ format!(\n+ \"{} years {} mons 0 days 0 hours 0 mins 0.00 secs\",\n+ years, month,\n+ )\n+ };\n+\n+ s\n+ }};\n+}\n+\n+#[macro_export]\n+macro_rules! 
make_string_interval_day_time {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let value: u64 = $array.value($row) as u64;\n+\n+ let days_parts: i32 = ((value & 0xFFFFFFFF00000000) >> 32) as i32;\n+ let milliseconds_part: i32 = (value & 0xFFFFFFFF) as i32;\n+\n+ let secs = milliseconds_part / 1000;\n+ let mins = secs / 60;\n+ let hours = mins / 60;\n+\n+ let secs = secs - (mins * 60);\n+ let mins = mins - (hours * 60);\n+\n+ format!(\n+ \"0 years 0 mons {} days {} hours {} mins {}.{:02} secs\",\n+ days_parts,\n+ hours,\n+ mins,\n+ secs,\n+ (milliseconds_part % 1000),\n+ )\n+ };\n+\n+ s\n+ }};\n+}\ndiff --git a/rust/cubesql/src/compile/engine/df/mod.rs b/rust/cubesql/src/compile/engine/df/mod.rs\nindex a19a970..3097523 100644\n--- a/rust/cubesql/src/compile/engine/df/mod.rs\n+++ b/rust/cubesql/src/compile/engine/df/mod.rs\n@@ -1 +1,2 @@\n pub mod coerce;\n+pub mod intervals;\ndiff --git a/rust/cubesql/src/compile/engine/udf.rs b/rust/cubesql/src/compile/engine/udf.rs\nindex 55b8bc1..0e160b3 100644\n--- a/rust/cubesql/src/compile/engine/udf.rs\n+++ b/rust/cubesql/src/compile/engine/udf.rs\n@@ -1,14 +1,19 @@\n use std::any::type_name;\n use std::sync::Arc;\n \n+\n use datafusion::{\n arrow::{\n array::{\n ArrayRef, BooleanArray, BooleanBuilder, GenericStringArray, Int32Builder,\n- PrimitiveArray, StringBuilder, UInt32Builder,\n+ IntervalDayTimeBuilder, PrimitiveArray, StringBuilder,\n+ UInt32Builder,\n },\n compute::cast,\n- datatypes::{DataType, Int64Type},\n+ datatypes::{\n+ DataType, Int64Type, IntervalUnit, TimeUnit,\n+ TimestampNanosecondType,\n+ },\n },\n error::DataFusionError,\n logical_plan::create_udf,\n@@ -399,3 +404,63 @@ pub fn create_convert_tz_udf() -> ScalarUDF {\n &fun,\n )\n }\n+\n+pub fn create_timediff_udf() -> ScalarUDF {\n+ let fun = make_scalar_function(move |args: &[ArrayRef]| {\n+ assert!(args.len() == 2);\n+\n+ let left_dt = &args[0];\n+ let right_dt = &args[1];\n+\n+ let left_date = match left_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(left_dt, \"left_dt\", TimestampNanosecondType);\n+ let ts = arr.value(0);\n+\n+ // NaiveDateTime::from_timestamp(ts, 0)\n+ ts\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"left_dt argument must be a Timestamp, actual: {}\",\n+ left_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let right_date = match right_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(right_dt, \"right_dt\", TimestampNanosecondType);\n+ arr.value(0)\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"right_dt argument must be a Timestamp, actual: {}\",\n+ right_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let diff = right_date - left_date;\n+ if diff != 0 {\n+ return Err(DataFusionError::NotImplemented(format!(\n+ \"timediff is not implemented, it's stub\"\n+ )));\n+ }\n+\n+ let mut interal_arr = IntervalDayTimeBuilder::new(1);\n+ interal_arr.append_value(diff)?;\n+\n+ Ok(Arc::new(interal_arr.finish()) as ArrayRef)\n+ });\n+\n+ let return_type: ReturnTypeFunction =\n+ Arc::new(move |_| Ok(Arc::new(DataType::Interval(IntervalUnit::DayTime))));\n+\n+ ScalarUDF::new(\n+ \"timediff\",\n+ &Signature::any(2, Volatility::Immutable),\n+ &return_type,\n+ &fun,\n+ )\n+}\ndiff --git a/rust/cubesql/src/compile/mod.rs b/rust/cubesql/src/compile/mod.rs\nindex a88da57..6121aa0 100644\n--- a/rust/cubesql/src/compile/mod.rs\n+++ b/rust/cubesql/src/compile/mod.rs\n@@ 
-32,8 +32,8 @@ use self::engine::context::SystemVar;\n use self::engine::provider::CubeContext;\n use self::engine::udf::{\n create_connection_id_udf, create_convert_tz_udf, create_current_user_udf, create_db_udf,\n- create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_user_udf,\n- create_version_udf,\n+ create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_timediff_udf,\n+ create_user_udf, create_version_udf,\n };\n use self::parser::parse_sql_to_statement;\n \n@@ -1450,6 +1450,7 @@ impl QueryPlanner {\n ctx.register_udf(create_if_udf());\n ctx.register_udf(create_least_udf());\n ctx.register_udf(create_convert_tz_udf());\n+ ctx.register_udf(create_timediff_udf());\n \n let state = ctx.state.lock().unwrap().clone();\n let cube_ctx = CubeContext::new(&state, &self.context.cubes);\n@@ -3226,6 +3227,25 @@ mod tests {\n }\n \n #[tokio::test]\n+ async fn test_timediff() -> Result<(), CubeError> {\n+ assert_eq!(\n+ execute_df_query(\n+ \"select \\\n+ timediff('1994-11-26T13:25:00.000Z'::timestamp, '1994-11-26T13:25:00.000Z'::timestamp) as r1\n+ \".to_string()\n+ )\n+ .await?,\n+ \"+------------------------------------------------+\\n\\\n+ | r1 |\\n\\\n+ +------------------------------------------------+\\n\\\n+ | 0 years 0 mons 0 days 0 hours 0 mins 0.00 secs |\\n\\\n+ +------------------------------------------------+\"\n+ );\n+\n+ Ok(())\n+ }\n+\n+ #[tokio::test]\n async fn test_metabase() -> Result<(), CubeError> {\n assert_eq!(\n execute_df_query(\ndiff --git a/rust/cubesql/src/mysql/dataframe.rs b/rust/cubesql/src/mysql/dataframe.rs\nindex fa246aa..2443458 100644\n--- a/rust/cubesql/src/mysql/dataframe.rs\n+++ b/rust/cubesql/src/mysql/dataframe.rs\n@@ -3,9 +3,10 @@ use std::fmt::{self, Debug, Formatter};\n use chrono::{SecondsFormat, TimeZone, Utc};\n use comfy_table::{Cell, Table};\n use datafusion::arrow::array::{\n- Array, Float64Array, Int32Array, Int64Array, StringArray, TimestampMicrosecondArray,\n- UInt32Array,\n+ Array, Float64Array, Int32Array, Int64Array, IntervalDayTimeArray, IntervalYearMonthArray,\n+ StringArray, TimestampMicrosecondArray, UInt32Array,\n };\n+use datafusion::arrow::datatypes::IntervalUnit;\n use datafusion::arrow::{\n array::{BooleanArray, TimestampNanosecondArray, UInt64Array},\n datatypes::{DataType, TimeUnit},\n@@ -15,6 +16,7 @@ use log::{error, warn};\n use msql_srv::{ColumnFlags, ColumnType};\n \n use crate::{compile::builder::CompiledQueryFieldMeta, CubeError};\n+use crate::{make_string_interval_day_time, make_string_interval_year_month};\n \n #[derive(Clone, Debug)]\n pub struct Column {\n@@ -309,6 +311,7 @@ pub fn arrow_to_column_type(arrow_type: DataType) -> Result<ColumnType, CubeErro\n DataType::Binary => Ok(ColumnType::MYSQL_TYPE_BLOB),\n DataType::Utf8 | DataType::LargeUtf8 => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Timestamp(_, _) => Ok(ColumnType::MYSQL_TYPE_STRING),\n+ DataType::Interval(_) => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Float16 | DataType::Float64 => Ok(ColumnType::MYSQL_TYPE_DOUBLE),\n DataType::Boolean => Ok(ColumnType::MYSQL_TYPE_TINY),\n DataType::Int8\n@@ -402,6 +405,24 @@ pub fn batch_to_dataframe(batches: &Vec<RecordBatch>) -> Result<DataFrame, CubeE\n });\n }\n }\n+ DataType::Interval(IntervalUnit::DayTime) => {\n+ let a = array\n+ .as_any()\n+ .downcast_ref::<IntervalDayTimeArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_day_time!(a, i)));\n+ }\n+ }\n+ DataType::Interval(IntervalUnit::YearMonth) => {\n+ let 
a = array\n+ .as_any()\n+ .downcast_ref::<IntervalYearMonthArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_year_month!(a, i)));\n+ }\n+ }\n DataType::Boolean => {\n let a = array.as_any().downcast_ref::<BooleanArray>().unwrap();\n for i in 0..num_rows {\n"]
4
["10bdcb452ff9d2b884d45a9c43a4b8a20fc4a883", "7dc3b70fe40fc7de255a28bb3098bcb8c0d35365", "f86944ff00b970d7e2da48abbff43e58bdf29b99", "29dfb9716298c5a579c0ffba6742e13a29325670"]
["ci", "fix", "refactor", "feat"]
remove unused,skip if related view/hook/column of a filter is not found Signed-off-by: Pranav C <[email protected]>,change min checked results for score calculation,run nix macos jobs on macos-13 to try and avoid SIP
["diff --git a/src/content/redux/modules/dictionaries.ts b/src/content/redux/modules/dictionaries.ts\nindex 88f7215..570d397 100644\n--- a/src/content/redux/modules/dictionaries.ts\n+++ b/src/content/redux/modules/dictionaries.ts\n@@ -3,7 +3,6 @@ import { DictID, appConfigFactory, AppConfig } from '@/app-config'\n import isEqual from 'lodash/isEqual'\n import { saveWord } from '@/_helpers/record-manager'\n import { getDefaultSelectionInfo, SelectionInfo, isSameSelection } from '@/_helpers/selection'\n-import { createActiveConfigStream } from '@/_helpers/config-manager'\n import { isContainChinese, isContainEnglish, testerPunct, isContainMinor, testerChinese, testJapanese, testKorean } from '@/_helpers/lang-check'\n import { MsgType, MsgFetchDictResult } from '@/typings/message'\n import { StoreState, DispatcherThunk, Dispatcher } from './index'\ndiff --git a/src/content/redux/modules/widget.ts b/src/content/redux/modules/widget.ts\nindex 53ad550..68e0a3d 100644\n--- a/src/content/redux/modules/widget.ts\n+++ b/src/content/redux/modules/widget.ts\n@@ -1,9 +1,9 @@\n import * as recordManager from '@/_helpers/record-manager'\n import { StoreState, DispatcherThunk, Dispatcher } from './index'\n-import appConfigFactory, { TCDirection, AppConfig, DictID } from '@/app-config'\n+import appConfigFactory, { TCDirection, DictID } from '@/app-config'\n import { message, storage } from '@/_helpers/browser-api'\n-import { createActiveConfigStream, createConfigIDListStream } from '@/_helpers/config-manager'\n-import { MsgSelection, MsgType, MsgTempDisabledState, MsgEditWord, MsgOpenUrl, MsgFetchDictResult } from '@/typings/message'\n+import { createConfigIDListStream } from '@/_helpers/config-manager'\n+import { MsgType, MsgTempDisabledState, MsgEditWord, MsgOpenUrl, MsgFetchDictResult } from '@/typings/message'\n import { searchText, restoreDicts } from '@/content/redux/modules/dictionaries'\n import { SelectionInfo, getDefaultSelectionInfo } from '@/_helpers/selection'\n import { Mutable } from '@/typings/helpers'\n", "diff --git a/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts b/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\nindex 1515f88..6c250bd 100644\n--- a/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\n+++ b/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\n@@ -21,7 +21,13 @@ export default async function ({ ncMeta }: NcUpgraderCtx) {\n } else {\n continue;\n }\n- if (filter.project_id != model.project_id) {\n+\n+ // skip if related model is not found\n+ if (!model) {\n+ continue;\n+ }\n+\n+ if (filter.project_id !== model.project_id) {\n await ncMeta.metaUpdate(\n null,\n null,\n", "diff --git a/server/src/services/courseService.ts b/server/src/services/courseService.ts\nindex 89633f4..10bfc55 100644\n--- a/server/src/services/courseService.ts\n+++ b/server/src/services/courseService.ts\n@@ -580,8 +580,7 @@ export async function getTaskSolutionCheckers(courseTaskId: number, minCheckedCo\n .createQueryBuilder('tsr')\n .select('tsr.\"studentId\", ROUND(AVG(tsr.score)) as \"score\"')\n .where(qb => {\n- // query students with 3 checked tasks\n-\n+ // query students who checked enough tasks\n const query = qb\n .subQuery()\n .select('r.\"checkerId\"')\n@@ -600,7 +599,7 @@ export async function getTaskSolutionCheckers(courseTaskId: number, minCheckedCo\n })\n .andWhere('tsr.\"courseTaskId\" = :courseTaskId', { courseTaskId })\n .groupBy('tsr.\"studentId\"')\n- .having(`COUNT(tsr.id) >= :count`, { count: minCheckedCount })\n+ 
.having(`COUNT(tsr.id) >= :count`, { count: minCheckedCount - 1 })\n .getRawMany();\n \n return records.map(record => ({ studentId: record.studentId, score: Number(record.score) }));\n", "diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml\nnew file mode 100644\nindex 0000000..5be7d17\n--- /dev/null\n+++ b/.github/actionlint.yaml\n@@ -0,0 +1,7 @@\n+self-hosted-runner:\n+ # Labels of self-hosted runner in array of strings.\n+ labels: [macos-13]\n+# Configuration variables in array of strings defined in your repository or\n+# organization. `null` means disabling configuration variables check.\n+# Empty array means no configuration variable is allowed.\n+config-variables: null\ndiff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml\nindex e37346c..dce77e1 100644\n--- a/.github/workflows/nix.yml\n+++ b/.github/workflows/nix.yml\n@@ -37,7 +37,7 @@ jobs:\n - \"3.10\"\n - \"3.11\"\n include:\n- - os: macos-latest\n+ - os: macos-13\n python-version: \"3.10\"\n steps:\n - name: checkout\ndiff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\nindex 005a850..8db22e2 100644\n--- a/.pre-commit-config.yaml\n+++ b/.pre-commit-config.yaml\n@@ -3,7 +3,7 @@ ci:\n autofix_prs: false\n autoupdate_commit_msg: \"chore(deps): pre-commit.ci autoupdate\"\n skip:\n- - actionlint\n+ - actionlint-system\n - deadnix\n - just\n - nixpkgs-fmt\n@@ -17,9 +17,9 @@ default_stages:\n - commit\n repos:\n - repo: https://github.com/rhysd/actionlint\n- rev: v1.6.24\n+ rev: v1.6.25\n hooks:\n- - id: actionlint\n+ - id: actionlint-system\n - repo: https://github.com/psf/black\n rev: 23.3.0\n hooks:\n@@ -30,7 +30,7 @@ repos:\n - id: nbstripout\n exclude: .+/rendered/.+\n - repo: https://github.com/codespell-project/codespell\n- rev: v2.2.4\n+ rev: v2.2.5\n hooks:\n - id: codespell\n additional_dependencies:\n"]
4
["a50b51999015e210918d9c8e95fd4cac347353be", "ab1e60a97c6d5c688dacbd23bca40cb8f20c4ac3", "fd849bd08363df60dbc8b9b6d55bac4f5ace88f4", "54cb6d4643b4a072ff997592a7fa14a69a6c068d"]
["refactor", "fix", "docs", "ci"]
pass absolute burnchain block height to pox sync watchdog so we correctly infer ibd status,do not pin time in tests but only skip ahead related to #573,correct code comment,updated riot to v6, fixed build
["diff --git a/testnet/stacks-node/src/run_loop/neon.rs b/testnet/stacks-node/src/run_loop/neon.rs\nindex 677749b..dc4a7bd 100644\n--- a/testnet/stacks-node/src/run_loop/neon.rs\n+++ b/testnet/stacks-node/src/run_loop/neon.rs\n@@ -411,7 +411,6 @@ impl RunLoop {\n \n let mut burnchain_height = sortition_db_height;\n let mut num_sortitions_in_last_cycle = 1;\n- let mut learned_burnchain_height = false;\n \n // prepare to fetch the first reward cycle!\n target_burnchain_block_height = burnchain_height + pox_constants.reward_cycle_length as u64;\n@@ -439,18 +438,16 @@ impl RunLoop {\n break;\n }\n \n+ let remote_chain_height = burnchain.get_headers_height();\n+\n // wait for the p2p state-machine to do at least one pass\n- debug!(\"Wait until we reach steady-state before processing more burnchain blocks...\");\n+ debug!(\"Wait until we reach steady-state before processing more burnchain blocks (chain height is {}, we are at {})...\", remote_chain_height, burnchain_height);\n \n // wait until it's okay to process the next sortitions\n let ibd = match pox_watchdog.pox_sync_wait(\n &burnchain_config,\n &burnchain_tip,\n- if learned_burnchain_height {\n- Some(burnchain_height)\n- } else {\n- None\n- },\n+ Some(remote_chain_height),\n num_sortitions_in_last_cycle,\n ) {\n Ok(ibd) => ibd,\n@@ -478,7 +475,6 @@ impl RunLoop {\n };\n \n // *now* we know the burnchain height\n- learned_burnchain_height = true;\n burnchain_tip = next_burnchain_tip;\n burnchain_height = cmp::min(burnchain_height + 1, target_burnchain_block_height);\n \n", "diff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\nindex 636cd21..76afff7 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\n@@ -15,7 +15,9 @@\n */\n package io.zeebe.broker.it.startup;\n \n-import static io.zeebe.broker.it.util.TopicEventRecorder.*;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.incidentEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.taskEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.wfInstanceEvent;\n import static io.zeebe.test.util.TestUtil.doRepeatedly;\n import static io.zeebe.test.util.TestUtil.waitUntil;\n import static org.assertj.core.api.Assertions.assertThat;\n@@ -24,11 +26,18 @@ import java.io.File;\n import java.io.InputStream;\n import java.nio.charset.StandardCharsets;\n import java.time.Duration;\n-import java.time.Instant;\n import java.util.Collections;\n import java.util.List;\n import java.util.regex.Pattern;\n \n+import org.assertj.core.util.Files;\n+import org.junit.After;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.ExpectedException;\n+import org.junit.rules.RuleChain;\n+import org.junit.rules.TemporaryFolder;\n+\n import io.zeebe.broker.clustering.ClusterServiceNames;\n import io.zeebe.broker.it.ClientRule;\n import io.zeebe.broker.it.EmbeddedBrokerRule;\n@@ -38,7 +47,9 @@ import io.zeebe.client.ZeebeClient;\n import io.zeebe.client.clustering.impl.TopicLeader;\n import io.zeebe.client.clustering.impl.TopologyResponse;\n import io.zeebe.client.cmd.ClientCommandRejectedException;\n-import io.zeebe.client.event.*;\n+import io.zeebe.client.event.DeploymentEvent;\n+import io.zeebe.client.event.TaskEvent;\n+import io.zeebe.client.event.WorkflowInstanceEvent;\n import 
io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.instance.WorkflowDefinition;\n import io.zeebe.raft.Raft;\n@@ -48,9 +59,6 @@ import io.zeebe.test.util.TestFileUtil;\n import io.zeebe.test.util.TestUtil;\n import io.zeebe.transport.SocketAddress;\n import io.zeebe.util.time.ClockUtil;\n-import org.assertj.core.util.Files;\n-import org.junit.*;\n-import org.junit.rules.*;\n \n public class BrokerRecoveryTest\n {\n@@ -360,17 +368,12 @@ public class BrokerRecoveryTest\n waitUntil(() -> !recordingTaskHandler.getHandledTasks().isEmpty());\n \n // when\n- restartBroker(() ->\n- {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n- });\n+ restartBroker(() -> ClockUtil.addTime(Duration.ofSeconds(60)));\n \n // wait until stream processor and scheduler process the lock task event which is not re-processed on recovery\n doRepeatedly(() ->\n {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n+ ClockUtil.addTime(Duration.ofSeconds(60)); // retriggers lock expiration check in broker\n return null;\n }).until(t -> eventRecorder.hasTaskEvent(taskEvent(\"LOCK_EXPIRED\")));\n \ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\nindex 5ff1301..0ffe98d 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\n@@ -15,7 +15,9 @@\n */\n package io.zeebe.broker.it.startup;\n \n-import static io.zeebe.broker.it.util.TopicEventRecorder.*;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.incidentEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.taskEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.wfInstanceEvent;\n import static io.zeebe.test.util.TestUtil.waitUntil;\n import static org.assertj.core.api.Assertions.assertThat;\n \n@@ -23,11 +25,18 @@ import java.io.File;\n import java.io.InputStream;\n import java.nio.charset.StandardCharsets;\n import java.time.Duration;\n-import java.time.Instant;\n import java.util.Collections;\n import java.util.List;\n import java.util.regex.Pattern;\n \n+import org.junit.After;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.experimental.categories.Category;\n+import org.junit.rules.ExpectedException;\n+import org.junit.rules.RuleChain;\n+import org.junit.rules.TemporaryFolder;\n+\n import io.zeebe.broker.clustering.ClusterServiceNames;\n import io.zeebe.broker.it.ClientRule;\n import io.zeebe.broker.it.EmbeddedBrokerRule;\n@@ -37,7 +46,9 @@ import io.zeebe.client.ZeebeClient;\n import io.zeebe.client.clustering.impl.TopicLeader;\n import io.zeebe.client.clustering.impl.TopologyResponse;\n import io.zeebe.client.cmd.ClientCommandRejectedException;\n-import io.zeebe.client.event.*;\n+import io.zeebe.client.event.DeploymentEvent;\n+import io.zeebe.client.event.TaskEvent;\n+import io.zeebe.client.event.WorkflowInstanceEvent;\n import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.instance.WorkflowDefinition;\n import io.zeebe.raft.Raft;\n@@ -47,9 +58,6 @@ import io.zeebe.test.util.TestFileUtil;\n import io.zeebe.test.util.TestUtil;\n import io.zeebe.transport.SocketAddress;\n import io.zeebe.util.time.ClockUtil;\n-import org.junit.*;\n-import org.junit.experimental.categories.Category;\n-import org.junit.rules.*;\n \n public class 
BrokerRestartTest\n {\n@@ -360,11 +368,7 @@ public class BrokerRestartTest\n waitUntil(() -> !recordingTaskHandler.getHandledTasks().isEmpty());\n \n // when\n- restartBroker(() ->\n- {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n- });\n+ restartBroker(() -> ClockUtil.addTime(Duration.ofSeconds(60)));\n \n waitUntil(() -> eventRecorder.hasTaskEvent(taskEvent(\"LOCK_EXPIRED\")));\n recordingTaskHandler.clear();\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\nindex 49b527d..a322fbe 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\n@@ -353,7 +353,7 @@ public class TaskSubscriptionTest\n waitUntil(() -> taskHandler.getHandledTasks().size() == 1);\n \n // when\n- ClockUtil.setCurrentTime(Instant.now().plus(Duration.ofMinutes(5)));\n+ ClockUtil.addTime(Duration.ofMinutes(5));\n \n // then\n waitUntil(() -> taskHandler.getHandledTasks().size() == 2);\n", "diff --git a/server/src/db.rs b/server/src/db.rs\nindex bfc5e17..0fb4d55 100644\n--- a/server/src/db.rs\n+++ b/server/src/db.rs\n@@ -389,7 +389,7 @@ impl Db {\n let partition = LockableCatalogPartition::new(Arc::clone(&self), partition);\n \n // Do lock dance to get a write lock on the partition as well\n- // as on all of the chunks\n+ // as on the to-be-dropped chunk.\n let partition = partition.read();\n \n let chunk = self.lockable_chunk(table_name, partition_key, chunk_id)?;\n", "diff --git a/components/riot/package.json b/components/riot/package.json\nindex c41743a..eb69756 100644\n--- a/components/riot/package.json\n+++ b/components/riot/package.json\n@@ -61,7 +61,7 @@\n },\n \"devDependencies\": {\n \"@babel/preset-typescript\": \"^7.14.5\",\n- \"@riotjs/cli\": \"^6.0.4\",\n+ \"@riotjs/cli\": \"^6.0.5\",\n \"@riotjs/compiler\": \"^6.0.0\",\n \"chai\": \"^4.3.4\",\n \"esm\": \"^3.2.25\",\n"]
4
["5b70e008c57efc89da4061f9adb7d0491b2ea644", "7ece3a9a16780dc6c633bbd903d36ce0aefd6a8a", "cccdd8a43fea7614f78b6f1dcf1765100928a3db", "5d256f937f93e5a5ed003df86d38c44834095a11"]
["fix", "test", "docs", "build"]
added components pages to typedoc output,use new, public `quay.io/influxdb/iox` image,Added tooltip for Data sources table buttons only on small screens,update version (v0.6.18)
["diff --git a/core/main/tsconfig.json b/core/main/tsconfig.json\nindex c4474a7..7916bc5 100644\n--- a/core/main/tsconfig.json\n+++ b/core/main/tsconfig.json\n@@ -96,11 +96,35 @@\n \"particles\": {\n \"groups\": [\n {\n- \"title\": \"Documentation\",\n+ \"title\": \"Components\",\n \"pages\": [\n {\n- \"title\": \"My Page\",\n- \"source\": \"./markdown/pages/index.md\"\n+ \"title\": \"Angular\",\n+ \"source\": \"../../components/angular/README.md\"\n+ },\n+ {\n+ \"title\": \"React\",\n+ \"source\": \"../../components/react/README.md\"\n+ },\n+ {\n+ \"title\": \"Vue\",\n+ \"source\": \"../../components/vue/README.md\"\n+ },\n+ {\n+ \"title\": \"Svelte\",\n+ \"source\": \"../../components/svelte/README.md\"\n+ },\n+ {\n+ \"title\": \"jQuery\",\n+ \"source\": \"../../components/jquery/README.md\"\n+ },\n+ {\n+ \"title\": \"Preact\",\n+ \"source\": \"../../components/preact/README.md\"\n+ },\n+ {\n+ \"title\": \"Inferno\",\n+ \"source\": \"../../components/inferno/README.md\"\n }\n ]\n }\n", "diff --git a/.circleci/config.yml b/.circleci/config.yml\nindex 3ae6728..a5f2d2f 100644\n--- a/.circleci/config.yml\n+++ b/.circleci/config.yml\n@@ -12,7 +12,7 @@\n # The CI for every PR and merge to main runs tests, fmt, lints and compiles debug binaries\n #\n # On main if all these checks pass it will then additionally compile in \"release\" mode and\n-# publish a docker image to quay.io/influxdb/fusion:$COMMIT_SHA\n+# publish a docker image to quay.io/influxdb/iox:$COMMIT_SHA\n #\n # Manual CI Image:\n #\n@@ -317,11 +317,11 @@ jobs:\n #\n # Uses the latest ci_image (influxdb/rust below) to build a release binary and\n # copies it to a minimal container image based upon `rust:slim-buster`. This\n- # minimal image is then pushed to `quay.io/influxdb/fusion:${BRANCH}` with '/'\n+ # minimal image is then pushed to `quay.io/influxdb/iox:${BRANCH}` with '/'\n # repaced by '.' 
- as an example:\n #\n # git branch: dom/my-awesome-feature/perf\n- # container: quay.io/influxdb/fusion:dom.my-awesome-feature.perf\n+ # container: quay.io/influxdb/iox:dom.my-awesome-feature.perf\n #\n # Subsequent CI runs will overwrite the tag if you push more changes, so watch\n # out for parallel CI runs!\n@@ -365,7 +365,7 @@ jobs:\n sudo apt-get update\n sudo apt-get install -y docker.io\n - run: |\n- echo \"$QUAY_PASS\" | docker login quay.io --username $QUAY_USER --password-stdin\n+ echo \"$QUAY_INFLUXDB_IOX_PASS\" | docker login quay.io --username $QUAY_INFLUXDB_IOX_USER --password-stdin\n - run:\n # Docker has functionality to support per-Dockerfile .dockerignore\n # This was added in https://github.com/moby/buildkit/pull/901\n@@ -379,8 +379,8 @@ jobs:\n echo sha256sum after build is\n sha256sum target/release/influxdb_iox\n COMMIT_SHA=$(git rev-parse --short HEAD)\n- docker build -t quay.io/influxdb/fusion:$COMMIT_SHA -f docker/Dockerfile.iox .\n- docker push quay.io/influxdb/fusion:$COMMIT_SHA\n+ docker build -t quay.io/influxdb/iox:$COMMIT_SHA -f docker/Dockerfile.iox .\n+ docker push quay.io/influxdb/iox:$COMMIT_SHA\n echo \"export COMMIT_SHA=${COMMIT_SHA}\" >> $BASH_ENV\n - run:\n name: Deploy tags\n", "diff --git a/packages/nc-gui/components/dashboard/settings/DataSources.vue b/packages/nc-gui/components/dashboard/settings/DataSources.vue\nindex 78caa98..0ed5df9 100644\n--- a/packages/nc-gui/components/dashboard/settings/DataSources.vue\n+++ b/packages/nc-gui/components/dashboard/settings/DataSources.vue\n@@ -351,59 +351,78 @@ const isEditBaseModalOpen = computed({\n \n <div class=\"ds-table-col ds-table-actions\">\n <div class=\"flex items-center gap-2\">\n- <NcButton\n- v-if=\"!sources[0].is_meta && !sources[0].is_local\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- size=\"small\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.Metadata)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('tooltip.metaSync') }}\n+ <NcTooltip v-if=\"!sources[0].is_meta && !sources[0].is_local\" overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('tooltip.metaSync') }}\n+ </template>\n+ <NcButton\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ size=\"small\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.Metadata)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('tooltip.metaSync') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.ERD)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.relations') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.relations') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.ERD)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ 
$t('title.relations') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.UIAcl)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('labels.uiAcl') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('labels.uiAcl') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.UIAcl)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('labels.uiAcl') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.Audit)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"book\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.audit') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.audit') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.Audit)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"book\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.audit') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n+ </NcButton>\n+ </NcTooltip>\n </div>\n </div>\n <div class=\"ds-table-col ds-table-crud\">\n@@ -450,67 +469,92 @@ const isEditBaseModalOpen = computed({\n \n <div class=\"ds-table-col ds-table-actions\">\n <div class=\"flex items-center gap-2\">\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.ERD)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.relations') }}\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.relations') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.ERD)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.relations') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('labels.uiAcl') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ type=\"text\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.UIAcl)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('labels.uiAcl') }}\n+ </div>\n+ </div>\n+ </NcButton>\n+ 
</NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('tooltip.metaSync') }}\n+ </template>\n+ <NcButton\n+ v-if=\"!source.is_meta && !source.is_local\"\n+ size=\"small\"\n+ type=\"text\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.Metadata)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('tooltip.metaSync') }}\n+ </div>\n+ </div>\n+ </NcButton>\n+ </NcTooltip>\n+ </div>\n+ </div>\n+ <div class=\"ds-table-col ds-table-crud justify-end gap-x-1\">\n+ <NcTooltip>\n+ <template #title>\n+ {{ $t('general.edit') }}\n+ </template>\n <NcButton\n+ v-if=\"!source.is_meta && !source.is_local\"\n size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n type=\"text\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.UIAcl)\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.Edit)\"\n >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('labels.uiAcl') }}\n- </div>\n- </div>\n+ <GeneralIcon icon=\"edit\" class=\"text-gray-600 -mt-0.5\" />\n </NcButton>\n+ </NcTooltip>\n+ <NcTooltip>\n+ <template #title>\n+ {{ $t('general.delete') }}\n+ </template>\n <NcButton\n v-if=\"!source.is_meta && !source.is_local\"\n size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n type=\"text\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.Metadata)\"\n+ @click=\"openDeleteBase(source)\"\n >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('tooltip.metaSync') }}\n- </div>\n- </div>\n+ <GeneralIcon icon=\"delete\" class=\"text-red-500 -mt-0.5\" />\n </NcButton>\n- </div>\n- </div>\n- <div class=\"ds-table-col ds-table-crud justify-end gap-x-1\">\n- <NcButton\n- v-if=\"!source.is_meta && !source.is_local\"\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n- type=\"text\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.Edit)\"\n- >\n- <GeneralIcon icon=\"edit\" class=\"text-gray-600 -mt-0.5\" />\n- </NcButton>\n- <NcButton\n- v-if=\"!source.is_meta && !source.is_local\"\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n- type=\"text\"\n- @click=\"openDeleteBase(source)\"\n- >\n- <GeneralIcon icon=\"delete\" class=\"text-red-500 -mt-0.5\" />\n- </NcButton>\n+ </NcTooltip>\n </div>\n </div>\n </template>\ndiff --git a/packages/nc-gui/components/nc/Tooltip.vue b/packages/nc-gui/components/nc/Tooltip.vue\nindex 0810b8b..97b159e 100644\n--- a/packages/nc-gui/components/nc/Tooltip.vue\n+++ b/packages/nc-gui/components/nc/Tooltip.vue\n@@ -12,6 +12,7 @@ interface Props {\n disabled?: boolean\n placement?: TooltipPlacement | undefined\n hideOnClick?: boolean\n+ overlayClassName?: string\n }\n \n const props = defineProps<Props>()\n@@ -36,6 +37,8 @@ const attrs = useAttrs()\n \n const isKeyPressed = ref(false)\n \n+const overlayClassName = computed(() => props.overlayClassName)\n+\n onKeyStroke(\n (e) => e.key === modifierKey.value,\n (e) => {\n@@ -100,7 +103,7 @@ const 
onClick = () => {\n <template>\n <a-tooltip\n v-model:visible=\"showTooltip\"\n- :overlay-class-name=\"`nc-tooltip ${showTooltip ? 'visible' : 'hidden'}`\"\n+ :overlay-class-name=\"`nc-tooltip ${showTooltip ? 'visible' : 'hidden'} ${overlayClassName}`\"\n :overlay-style=\"tooltipStyle\"\n arrow-point-at-center\n :trigger=\"[]\"\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex c32d8b4..599790e 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -125,7 +125,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -133,7 +133,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex baaa0ac..5082cd3 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n experimental = [\"erg_common/experimental\", \"erg_parser/experimental\", \"erg_compiler/experimental\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.30-nightly.2\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.18\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.18\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.18\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.30\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 3efbf4e..9f902fa 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n"]
4
["fca2c198c6486c4d586b1af1832be46f19667235", "f751bb5426b87f82096d620f1cd6203badf45d58", "a75538817c20fc4132718fd7b586bf835a5795e3", "bb3e3d9b96e435c3b92fc208bca93d1ad7e1ad50"]
["docs", "ci", "feat", "build"]
move toolbar to tab content level Signed-off-by: Pranav C <[email protected]>,update get-started,add gitignore.nix to dep update matrix,apply element migrated events This is a very straightforward event applier. All it needs to do is update the persisted data for the element instance using the data in the event.
["diff --git a/packages/nc-gui-v2/components.d.ts b/packages/nc-gui-v2/components.d.ts\nindex f6be04b..cf555ef 100644\n--- a/packages/nc-gui-v2/components.d.ts\n+++ b/packages/nc-gui-v2/components.d.ts\n@@ -201,6 +201,7 @@ declare module '@vue/runtime-core' {\n MdiThumbUp: typeof import('~icons/mdi/thumb-up')['default']\n MdiTrashCan: typeof import('~icons/mdi/trash-can')['default']\n MdiTwitter: typeof import('~icons/mdi/twitter')['default']\n+ MdiUpload: typeof import('~icons/mdi/upload')['default']\n MdiUploadOutline: typeof import('~icons/mdi/upload-outline')['default']\n MdiViewListOutline: typeof import('~icons/mdi/view-list-outline')['default']\n MdiWhatsapp: typeof import('~icons/mdi/whatsapp')['default']\ndiff --git a/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue b/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue\nindex c2c87d3..27c0acc 100644\n--- a/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue\n+++ b/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue\n@@ -132,7 +132,7 @@ async function changeLockType(type: LockType) {\n <div>\n <a-dropdown>\n <a-button v-t=\"['c:actions']\" class=\"nc-actions-menu-btn nc-toolbar-btn\">\n- <div class=\"flex gap-2 align-center\">\n+ <div class=\"flex gap-2 items-center\">\n <component\n :is=\"viewIcons[selectedView?.type].icon\"\n class=\"nc-view-icon group-hover:hidden\"\n@@ -311,6 +311,6 @@ async function changeLockType(type: LockType) {\n \n <style scoped>\n .nc-locked-menu-item > div {\n- @apply grid grid-cols-[30px,auto] gap-2 p-2 align-center;\n+ @apply grid grid-cols-[30px,auto] gap-2 p-2 items-center;\n }\n </style>\ndiff --git a/packages/nc-gui-v2/components/smartsheet/Toolbar.vue b/packages/nc-gui-v2/components/smartsheet/Toolbar.vue\nindex 5fa555f..d498871 100644\n--- a/packages/nc-gui-v2/components/smartsheet/Toolbar.vue\n+++ b/packages/nc-gui-v2/components/smartsheet/Toolbar.vue\n@@ -36,7 +36,7 @@ const {isOpen} =useSidebar()\n \n <SmartsheetToolbarSearchData v-if=\"(isGrid || isGallery) && !isPublic\" class=\"shrink mr-2 ml-2\" />\n \n- <ToggleDrawer v-if=\"!isOpen\"/>\n+ <ToggleDrawer class=\"mr-2\"/>\n \n \n </div>\ndiff --git a/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue b/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue\nindex 896ad62..77aee05 100644\n--- a/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue\n+++ b/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue\n@@ -99,6 +99,7 @@ function onCreate(view: GridType | FormType | KanbanType | GalleryType) {\n class=\"relative shadow-md h-full\"\n theme=\"light\"\n >\n+ <!--\n <Toolbar\n v-if=\"isOpen\"\n class=\"min-h-[var(--toolbar-height)] max-h-[var(--toolbar-height)]\"\n@@ -128,7 +129,7 @@ function onCreate(view: GridType | FormType | KanbanType | GalleryType) {\n <div v-if=\"!isForm\" class=\"dot\" />\n </template>\n </Toolbar>\n-\n+-->\n <div v-if=\"isOpen\" class=\"flex-1 flex flex-col\">\n <MenuTop @open-modal=\"openModal\" @deleted=\"loadViews\" @sorted=\"loadViews\" />\n \ndiff --git a/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue b/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue\nindex 3e3d78a..8441450 100644\n--- a/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue\n+++ b/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue\n@@ -4,7 +4,7 @@ const { isOpen, toggle } = useSidebar({ storageKey: 'nc-right-sidebar' })\n </script>\n \n <template>\n- <a-tooltip 
:placement=\"isOpen ? 'bottomRight' : 'left'\" :mouse-enter-delay=\"0.8\">\n+<!-- <a-tooltip :placement=\"isOpen ? 'bottomRight' : 'left'\" :mouse-enter-delay=\"0.8\">\n <template #title> Toggle sidebar</template>\n \n <div class=\"nc-sidebar-right-item hover:after:(bg-primary bg-opacity-75) group nc-sidebar-add-row\">\n@@ -14,5 +14,11 @@ const { isOpen, toggle } = useSidebar({ storageKey: 'nc-right-sidebar' })\n @click=\"toggle(!isOpen)\"\n />\n </div>\n- </a-tooltip>\n+ </a-tooltip>-->\n+\n+ <a-button @click=\"toggle(!isOpen)\" size=\"small\">\n+ <div class=\"flex items-center gap-2\"> <MdiMenu/> Views\n+ </div>\n+ </a-button>\n+\n </template>\ndiff --git a/packages/nc-gui-v2/components/tabs/Smartsheet.vue b/packages/nc-gui-v2/components/tabs/Smartsheet.vue\nindex 4181996..7b7ec36 100644\n--- a/packages/nc-gui-v2/components/tabs/Smartsheet.vue\n+++ b/packages/nc-gui-v2/components/tabs/Smartsheet.vue\n@@ -83,11 +83,11 @@ watch(isLocked, (nextValue) => (treeViewIsLockedInj.value = nextValue), { immedi\n \n <SmartsheetForm v-else-if=\"isForm\" />\n </div>\n+ <SmartsheetSidebar class=\"nc-right-sidebar\" v-if=\"meta\" />\n </div>\n </template>\n </div>\n \n- <SmartsheetSidebar class=\"nc-right-sidebar\" v-if=\"meta\" />\n </div>\n </template>\n \n", "diff --git a/docs/src/go-client/get-started.md b/docs/src/go-client/get-started.md\nindex 4f4405f..a792e0e 100755\n--- a/docs/src/go-client/get-started.md\n+++ b/docs/src/go-client/get-started.md\n@@ -199,14 +199,12 @@ workflowKey:1 bpmnProcessId:\"order-process\" version:1 workflowInstanceKey:6\n \n You did it! You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n \n ## Work on a task\n@@ -322,7 +320,7 @@ it encounters a problem while processing the job.\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/go-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n When you run the above example you should see similar output:\n \ndiff --git a/docs/src/go-client/java-get-started-monitor-1.gif b/docs/src/go-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/go-client/java-get-started-monitor-2.gif b/docs/src/go-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/go-client/zeebe-monitor-1.png b/docs/src/go-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-2.png b/docs/src/go-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 
6687bb0..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-3.png b/docs/src/go-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-3.png and /dev/null differ\ndiff --git a/docs/src/introduction/quickstart.md b/docs/src/introduction/quickstart.md\nindex 70abacf..68be28b 100644\n--- a/docs/src/introduction/quickstart.md\n+++ b/docs/src/introduction/quickstart.md\n@@ -215,7 +215,7 @@ and completed by a [job worker](/basics/job-workers.html). A job worker is a\n long living process which repeatedly tries to activate jobs for a given job\n type and completes them after executing its business logic. The `zbctl` also\n provides a command to spawn simple job workers using an external command or\n-script. The job worker will receive for every job the payload as JSON object on\n+script. The job worker will receive for every job the workflow instance variables as JSON object on\n `stdin` and has to return its result also as JSON object on `stdout` if it\n handled the job successfully.\n \ndiff --git a/docs/src/java-client/get-started.md b/docs/src/java-client/get-started.md\nindex 54d2208..afc1fd4 100755\n--- a/docs/src/java-client/get-started.md\n+++ b/docs/src/java-client/get-started.md\n@@ -21,9 +21,9 @@ You will be guided through the following steps:\n * [Zeebe Modeler](https://github.com/zeebe-io/zeebe-modeler/releases)\n * [Zeebe Monitor](https://github.com/zeebe-io/zeebe-simple-monitor/releases)\n \n-Before you begin to setup your project please start the broker, i.e. by running the start up script \n-`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the \n-address `localhost:26500`, which is used as contact point in this guide. In case your broker is \n+Before you begin to setup your project please start the broker, i.e. by running the start up script\n+`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the\n+address `localhost:26500`, which is used as contact point in this guide. In case your broker is\n available under another address please adjust the broker contact point when building the client.\n \n ## Set up a project\n@@ -182,14 +182,12 @@ Workflow instance created. Key: 6\n \n You did it! 
You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n ## Work on a job\n \n@@ -205,12 +203,9 @@ Insert a few service tasks between the start and the end event.\n You need to set the type of each task, which identifies the nature of the work to be performed.\n Set the type of the first task to 'payment-service'.\n \n-Optionally, you can define parameters of the task by adding headers.\n-Add the header `method = VISA` to the first task.\n-\n Save the BPMN diagram and switch back to the main class.\n \n-Add the following lines to create a [job worker][] for the first jobs type:\n+Add the following lines to create a job worker for the first jobs type:\n \n ```java\n package io.zeebe;\n@@ -227,10 +222,7 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Collect money\");\n \n // ...\n \n@@ -252,40 +244,29 @@ public class Application\n Run the program and verify that the job is processed. You should see the output:\n \n ```\n-Collect money using payment method: VISA\n+Collect money\n ```\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/java-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n ## Work with data\n \n-Usually, a workflow is more than just tasks, there is also data flow.\n-The tasks need data as input and in order to produce data.\n+Usually, a workflow is more than just tasks, there is also a data flow. The worker gets the data from the workflow instance to do its work and send the result back to the workflow instance.\n \n-In Zeebe, the data is represented as a JSON document.\n-When you create a workflow instance, then you can pass the data as payload.\n-Within the workflow, you can use input and output mappings on tasks to control the data flow.\n+In Zeebe, the data is stored as key-value-pairs in form of variables. Variables can be set when the workflow instance is created. 
Within the workflow, variables can be read and modified by workers.\n \n-In our example, we want to create a workflow instance with the following data:\n+In our example, we want to create a workflow instance with the following variables:\n \n ```json\n-{\n- \"orderId\": 31243,\n- \"orderItems\": [435, 182, 376]\n-}\n+\"orderId\": 31243\n+\"orderItems\": [435, 182, 376]\n ```\n \n-The first task should take `orderId` as input and return `totalPrice` as result.\n-\n-Open the BPMN diagram and switch to the input-output-mappings of the first task.\n-Add the input mapping `$.orderId : $.orderId` and the output mapping `$.totalPrice : $.totalPrice`.\n+The first task should read `orderId` as input and return `totalPrice` as result.\n \n-Save the BPMN diagram and go back to the main class.\n-\n-Modify the create command and pass the data as variables.\n-Also, modify the job worker to read the jobs payload and complete the job with payload.\n+Modify the workflow instance create command and pass the data as variables. Also, modify the job worker to read the job variables and complete the job with a result.\n \n ```java\n package io.zeebe;\n@@ -313,23 +294,22 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- final Map<String, Object> payload = job.getPayloadAsMap();\n+ final Map<String, Object> variables = job.getVariablesAsMap();\n \n- System.out.println(\"Process order: \" + payload.get(\"orderId\"));\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Process order: \" + variables.get(\"orderId\"));\n+ System.out.println(\"Collect money\");\n \n // ...\n \n- payload.put(\"totalPrice\", 46.50);\n+ final Map<String, Object> result = new HashMap<>();\n+ result.put(\"totalPrice\", 46.50);\n \n jobClient.newCompleteCommand(job.getKey())\n- .payload(payload)\n+ .variables(result)\n .send()\n .join();\n })\n+ .fetchVariables(\"orderId\")\n .open();\n \n // ...\n@@ -337,16 +317,16 @@ public class Application\n }\n ```\n \n-Run the program and verify that the payload is mapped into the job. You should see the output:\n+Run the program and verify that the variable is read. 
You should see the output:\n \n ```\n-Process order: {\"orderId\":31243}\n-Collect money using payment method: VISA\n+Process order: 31243\n+Collect money\n ```\n \n-When we have a look at the Zeebe Monitor, then we can see how the payload is modified after the activity:\n+When we have a look at the Zeebe Monitor, then we can see that the variable `totalPrice` is set:\n \n-![zeebe-monitor-step-3](/java-client/zeebe-monitor-3.png)\n+![zeebe-monitor-step-3](/java-client/java-get-started-monitor-3.gif)\n \n ## What's next?\n \ndiff --git a/docs/src/java-client/java-get-started-monitor-1.gif b/docs/src/java-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-2.gif b/docs/src/java-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-3.gif b/docs/src/java-client/java-get-started-monitor-3.gif\nnew file mode 100644\nindex 0000000..1f6cb56\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-3.gif differ\ndiff --git a/docs/src/java-client/zeebe-monitor-1.png b/docs/src/java-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-2.png b/docs/src/java-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 6687bb0..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-3.png b/docs/src/java-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-3.png and /dev/null differ\n", "diff --git a/.github/workflows/update-deps.yml b/.github/workflows/update-deps.yml\nindex e0ccd62..1236f58 100644\n--- a/.github/workflows/update-deps.yml\n+++ b/.github/workflows/update-deps.yml\n@@ -13,6 +13,7 @@ jobs:\n - nixpkgs\n - poetry2nix\n - pre-commit-hooks\n+ - gitignore.nix\n steps:\n - name: Checkout\n uses: actions/checkout@v2\n", "diff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\nindex da05e13..9231df3 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n@@ -154,6 +154,9 @@ public final class EventAppliers implements EventApplier {\n register(\n ProcessInstanceIntent.SEQUENCE_FLOW_TAKEN,\n new ProcessInstanceSequenceFlowTakenApplier(elementInstanceState, processState));\n+ register(\n+ ProcessInstanceIntent.ELEMENT_MIGRATED,\n+ new ProcessInstanceElementMigratedApplier(elementInstanceState));\n }\n \n private void registerProcessInstanceCreationAppliers(final MutableProcessingState state) {\ndiff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\nindex e5a0f3a..d38358f 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n+++ 
b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n@@ -24,5 +24,16 @@ final class ProcessInstanceElementMigratedApplier\n }\n \n @Override\n- public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {}\n+ public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {\n+ elementInstanceState.updateInstance(\n+ elementInstanceKey,\n+ elementInstance ->\n+ elementInstance\n+ .getValue()\n+ .setProcessDefinitionKey(value.getProcessDefinitionKey())\n+ .setBpmnProcessId(value.getBpmnProcessId())\n+ .setVersion(value.getVersion())\n+ .setElementId(value.getElementId())\n+ .setFlowScopeKey(value.getFlowScopeKey()));\n+ }\n }\n"]
4
["bf95d5d0b34d32ef2684488feb3de01cb824b2b4", "cf6d526123abab2689b24a06aaf03d8e4d6ddff4", "c444fdb9e85ce44c5c0c99addc777dd7b6085153", "39d5d1cfe8d2210305df2c8fab4a4ae430732cf7"]
["refactor", "docs", "ci", "feat"]
Use arm64v8 postfix for Cube Store :dev build,build improvements,await job creation to ensure asserted event sequence,set cursor position in setHorizontalRule correctly, fix #2429
["diff --git a/.github/workflows/rust-cubestore-master.yml b/.github/workflows/rust-cubestore-master.yml\nindex 4a84984..bb07cd7 100644\n--- a/.github/workflows/rust-cubestore-master.yml\n+++ b/.github/workflows/rust-cubestore-master.yml\n@@ -115,9 +115,9 @@ jobs:\n if [[ $VERSION =~ ^v[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$ ]]; then\n MINOR=${VERSION%.*}\n MAJOR=${MINOR%.*}\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR},${DOCKER_IMAGE}:latest\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR}\"\n elif [ \"${{ github.event_name }}\" = \"push\" ]; then\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}${{ matrix.postfix }}\"\n fi\n \n echo ::set-output name=version::${VERSION}\n", "diff --git a/.travis.yml b/.travis.yml\nindex 9e1b926..3144244 100644\n--- a/.travis.yml\n+++ b/.travis.yml\n@@ -1,5 +1,6 @@\n language: node_js\n dist: trusty\n+sudo: required\n node_js:\n - '6.9.5'\n before_install:\ndiff --git a/e2e/schematics/command-line.test.ts b/e2e/schematics/command-line.test.ts\nindex 16d8b34..ea91494 100644\n--- a/e2e/schematics/command-line.test.ts\n+++ b/e2e/schematics/command-line.test.ts\n@@ -68,8 +68,6 @@ describe('Command line', () => {\n \n updateFile('apps/myapp/src/app/app.component.spec.ts', `import '@nrwl/mylib';`);\n \n- updateRunAffectedToWorkInE2ESetup();\n-\n const affectedApps = runCommand('npm run affected:apps -- --files=\"libs/mylib/index.ts\"');\n expect(affectedApps).toContain('myapp');\n expect(affectedApps).not.toContain('myapp2');\n@@ -147,11 +145,3 @@ describe('Command line', () => {\n 1000000\n );\n });\n-\n-function updateRunAffectedToWorkInE2ESetup() {\n- const runAffected = readFile('node_modules/@nrwl/schematics/src/command-line/affected.js');\n- const newRunAffected = runAffected\n- .replace('ng build', '../../node_modules/.bin/ng build')\n- .replace('ng e2e', '../../node_modules/.bin/ng e2e');\n- updateFile('node_modules/@nrwl/schematics/src/command-line/affected.js', newRunAffected);\n-}\ndiff --git a/e2e/schematics/workspace.test.ts b/e2e/schematics/workspace.test.ts\nindex 8a41070..8749926 100644\n--- a/e2e/schematics/workspace.test.ts\n+++ b/e2e/schematics/workspace.test.ts\n@@ -82,7 +82,7 @@ describe('Nrwl Convert to Nx Workspace', () => {\n \n it('should generate a workspace and not change dependencies or devDependencies if they already exist', () => {\n // create a new AngularCLI app\n- runNgNew('--skip-install');\n+ runNgNew();\n const nxVersion = '0.0.0';\n const schematicsVersion = '0.0.0';\n const ngrxVersion = '0.0.0';\ndiff --git a/e2e/utils.ts b/e2e/utils.ts\nindex 422d866..a03104f 100644\n--- a/e2e/utils.ts\n+++ b/e2e/utils.ts\n@@ -17,8 +17,7 @@ export function newProject(): void {\n copyMissingPackages();\n execSync('mv ./tmp/proj ./tmp/proj_backup');\n }\n- execSync('cp -r ./tmp/proj_backup ./tmp/proj');\n- setUpSynLink();\n+ execSync('cp -a ./tmp/proj_backup ./tmp/proj');\n }\n \n export function copyMissingPackages(): void {\n@@ -26,14 +25,9 @@ export function copyMissingPackages(): void {\n modulesToCopy.forEach(m => copyNodeModule(projectName, m));\n }\n \n-export function setUpSynLink(): void {\n- execSync(`ln -s ../@nrwl/schematics/src/command-line/nx.js tmp/${projectName}/node_modules/.bin/nx`);\n- execSync(`chmod +x tmp/${projectName}/node_modules/.bin/nx`);\n-}\n-\n function copyNodeModule(path: string, name: string) {\n execSync(`rm -rf tmp/${path}/node_modules/${name}`);\n- execSync(`cp -r node_modules/${name} 
tmp/${path}/node_modules/${name}`);\n+ execSync(`cp -a node_modules/${name} tmp/${path}/node_modules/${name}`);\n }\n \n export function runCLI(\n@@ -43,7 +37,7 @@ export function runCLI(\n }\n ): string {\n try {\n- return execSync(`../../node_modules/.bin/ng ${command}`, {\n+ return execSync(`./node_modules/.bin/ng ${command}`, {\n cwd: `./tmp/${projectName}`\n })\n .toString()\n@@ -67,7 +61,7 @@ export function newLib(name: string): string {\n }\n \n export function runSchematic(command: string): string {\n- return execSync(`../../node_modules/.bin/schematics ${command}`, {\n+ return execSync(`./node_modules/.bin/schematics ${command}`, {\n cwd: `./tmp/${projectName}`\n }).toString();\n }\ndiff --git a/package.json b/package.json\nindex bef54f8..9186a58 100644\n--- a/package.json\n+++ b/package.json\n@@ -6,7 +6,7 @@\n \"private\": true,\n \"scripts\": {\n \"build\": \"./scripts/build.sh\",\n- \"e2e\": \"yarn build && ./scripts/e2e.sh\",\n+ \"e2e\": \"./scripts/e2e.sh\",\n \"format\": \"./scripts/format.sh\",\n \"linknpm\": \"./scripts/link.sh\",\n \"package\": \"./scripts/package.sh\",\n@@ -14,7 +14,7 @@\n \"copy\": \"./scripts/copy.sh\",\n \"test:schematics\": \"yarn build && ./scripts/test_schematics.sh\",\n \"test:nx\": \"yarn build && ./scripts/test_nx.sh\",\n- \"test\": \"yarn build && ./scripts/test_nx.sh && ./scripts/test_schematics.sh\",\n+ \"test\": \"yarn linknpm && ./scripts/test_nx.sh && ./scripts/test_schematics.sh\",\n \"checkformat\": \"./scripts/check-format.sh\",\n \"publish_npm\": \"./scripts/publish.sh\"\n },\ndiff --git a/packages/schematics/src/collection/workspace/index.ts b/packages/schematics/src/collection/workspace/index.ts\nindex 8f8897f..c70d161 100644\n--- a/packages/schematics/src/collection/workspace/index.ts\n+++ b/packages/schematics/src/collection/workspace/index.ts\n@@ -254,20 +254,7 @@ function moveFiles(options: Schema) {\n \n function copyAngularCliTgz() {\n return (host: Tree) => {\n- copyFile(\n- path.join(\n- 'node_modules',\n- '@nrwl',\n- 'schematics',\n- 'src',\n- 'collection',\n- 'application',\n- 'files',\n- '__directory__',\n- '.angular_cli.tgz'\n- ),\n- '.'\n- );\n+ copyFile(path.join(__dirname, '..', 'application', 'files', '__directory__', '.angular_cli.tgz'), '.');\n return host;\n };\n }\ndiff --git a/packages/schematics/src/command-line/affected.ts b/packages/schematics/src/command-line/affected.ts\nindex b7f9173..89a4f72 100644\n--- a/packages/schematics/src/command-line/affected.ts\n+++ b/packages/schematics/src/command-line/affected.ts\n@@ -1,5 +1,7 @@\n import { execSync } from 'child_process';\n import { getAffectedApps, parseFiles } from './shared';\n+import * as path from 'path';\n+import * as resolve from 'resolve';\n \n export function affected(args: string[]): void {\n const command = args[0];\n@@ -39,7 +41,7 @@ function build(apps: string[], rest: string[]) {\n if (apps.length > 0) {\n console.log(`Building ${apps.join(', ')}`);\n apps.forEach(app => {\n- execSync(`ng build ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n+ execSync(`${ngPath()} build ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n });\n } else {\n console.log('No apps to build');\n@@ -50,9 +52,13 @@ function e2e(apps: string[], rest: string[]) {\n if (apps.length > 0) {\n console.log(`Testing ${apps.join(', ')}`);\n apps.forEach(app => {\n- execSync(`ng e2e ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n+ execSync(`${ngPath()} e2e ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n });\n } else {\n- console.log('No apps to tst');\n+ 
console.log('No apps to test');\n }\n }\n+\n+function ngPath() {\n+ return `${path.dirname(path.dirname(path.dirname(resolve.sync('@angular/cli', { basedir: __dirname }))))}/bin/ng`;\n+}\ndiff --git a/scripts/build.sh b/scripts/build.sh\nindex ac533b5..9b8891b 100755\n--- a/scripts/build.sh\n+++ b/scripts/build.sh\n@@ -3,6 +3,8 @@\n rm -rf build\n ngc\n rsync -a --exclude=*.ts packages/ build/packages\n+chmod +x build/packages/schematics/bin/create-nx-workspace.js\n+chmod +x build/packages/schematics/src/command-line/nx.js\n rm -rf build/packages/install\n cp README.md build/packages/schematics\n cp README.md build/packages/nx\n\\ No newline at end of file\n", "diff --git a/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java b/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\nindex 9ffa1fa..4333db0 100644\n--- a/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\n@@ -114,12 +114,18 @@ public class BoundaryEventTest {\n ENGINE.deployment().withXmlResource(MULTIPLE_SEQUENCE_FLOWS).deploy();\n final long workflowInstanceKey = ENGINE.workflowInstance().ofBpmnProcessId(PROCESS_ID).create();\n \n- // when\n RecordingExporter.timerRecords()\n .withHandlerNodeId(\"timer\")\n .withIntent(TimerIntent.CREATED)\n .withWorkflowInstanceKey(workflowInstanceKey)\n .getFirst();\n+\n+ RecordingExporter.jobRecords(JobIntent.CREATED)\n+ .withType(\"type\")\n+ .withWorkflowInstanceKey(workflowInstanceKey)\n+ .getFirst();\n+\n+ // when\n ENGINE.increaseTime(Duration.ofMinutes(1));\n \n // then\n", "diff --git a/packages/extension-horizontal-rule/src/horizontal-rule.ts b/packages/extension-horizontal-rule/src/horizontal-rule.ts\nindex 6f583e1..c905b63 100644\n--- a/packages/extension-horizontal-rule/src/horizontal-rule.ts\n+++ b/packages/extension-horizontal-rule/src/horizontal-rule.ts\n@@ -49,15 +49,14 @@ export const HorizontalRule = Node.create<HorizontalRuleOptions>({\n // set cursor after horizontal rule\n .command(({ tr, dispatch }) => {\n if (dispatch) {\n- const { parent, pos } = tr.selection.$from\n- const posAfter = pos + 1\n- const nodeAfter = tr.doc.nodeAt(posAfter)\n+ const { $to } = tr.selection\n+ const posAfter = $to.end()\n \n- if (nodeAfter) {\n- tr.setSelection(TextSelection.create(tr.doc, posAfter))\n+ if ($to.nodeAfter) {\n+ tr.setSelection(TextSelection.create(tr.doc, $to.pos))\n } else {\n // add node after horizontal rule if it\u2019s the end of the document\n- const node = parent.type.contentMatch.defaultType?.create()\n+ const node = $to.parent.type.contentMatch.defaultType?.create()\n \n if (node) {\n tr.insert(posAfter, node)\n"]
4
["10bdcb452ff9d2b884d45a9c43a4b8a20fc4a883", "e0a977b2d316e7612b5d72cb02cd7d78e75dbc55", "a8d1a60fd48d3fbd76d4271987a1b0f538d498f1", "34d80114704679118e9bb6058e0d6c7aa03fd4b5"]
["ci", "build", "test", "fix"]
add important to override paragraphs in items,implement array flatten support,handle default_branch_monthly_cost having no cost Closes https://github.com/infracost/infracost-gh-action/issues/17,do not query all networks
["diff --git a/packages/core/src/components/text/text.ios.scss b/packages/core/src/components/text/text.ios.scss\nindex a3c58e2..2a020ab 100644\n--- a/packages/core/src/components/text/text.ios.scss\n+++ b/packages/core/src/components/text/text.ios.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-ios) {\n \n .text-ios-#{$color-name},\n- .text-ios-#{$color-name} a {\n- color: $color-base;\n+ .text-ios-#{$color-name} a,\n+ .text-ios-#{$color-name} p {\n+ color: $color-base !important\n }\n \n }\ndiff --git a/packages/core/src/components/text/text.md.scss b/packages/core/src/components/text/text.md.scss\nindex b397acb..050af1a 100644\n--- a/packages/core/src/components/text/text.md.scss\n+++ b/packages/core/src/components/text/text.md.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-md) {\n \n .text-md-#{$color-name},\n- .text-md-#{$color-name} a {\n- color: $color-base;\n+ .text-md-#{$color-name} a,\n+ .text-md-#{$color-name} p {\n+ color: $color-base !important;\n }\n \n }\n", "diff --git a/ibis/backends/snowflake/registry.py b/ibis/backends/snowflake/registry.py\nindex 2373dd7..4ce03b0 100644\n--- a/ibis/backends/snowflake/registry.py\n+++ b/ibis/backends/snowflake/registry.py\n@@ -422,6 +422,7 @@ operation_registry.update(\n ops.ArrayZip: _array_zip,\n ops.ArraySort: unary(sa.func.array_sort),\n ops.ArrayRepeat: fixed_arity(sa.func.ibis_udfs.public.array_repeat, 2),\n+ ops.ArrayFlatten: fixed_arity(sa.func.array_flatten, 1),\n ops.StringSplit: fixed_arity(sa.func.split, 2),\n # snowflake typeof only accepts VARIANT, so we cast\n ops.TypeOf: unary(lambda arg: sa.func.typeof(sa.func.to_variant(arg))),\n", "diff --git a/scripts/ci/diff.sh b/scripts/ci/diff.sh\nindex 7472273..fa48e4b 100755\n--- a/scripts/ci/diff.sh\n+++ b/scripts/ci/diff.sh\n@@ -112,7 +112,12 @@ echo \"$default_branch_output\" > default_branch_infracost.txt\n default_branch_monthly_cost=$(cat default_branch_infracost.txt | awk '/OVERALL TOTAL/ { gsub(\",\",\"\"); printf(\"%.2f\",$NF) }')\n echo \"::set-output name=default_branch_monthly_cost::$default_branch_monthly_cost\"\n \n-percent_diff=$(echo \"scale=4; $current_branch_monthly_cost / $default_branch_monthly_cost * 100 - 100\" | bc)\n+if [ $(echo \"$default_branch_monthly_cost > 0\" | bc -l) = 1 ]; then\n+ percent_diff=$(echo \"scale=4; $current_branch_monthly_cost / $default_branch_monthly_cost * 100 - 100\" | bc)\n+else\n+ echo \"Default branch has no cost, setting percent_diff=100 to force a comment\"\n+ percent_diff=100\n+fi\n absolute_percent_diff=$(echo $percent_diff | tr -d -)\n \n if [ $(echo \"$absolute_percent_diff > $percentage_threshold\" | bc -l) = 1 ]; then\n", "diff --git a/src/environment/windows_win32.go b/src/environment/windows_win32.go\nindex be0c7b5..b90e0ff 100644\n--- a/src/environment/windows_win32.go\n+++ b/src/environment/windows_win32.go\n@@ -203,7 +203,6 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \tvar pIFTable2 *MIN_IF_TABLE2\n \t_, _, _ = hGetIfTable2.Call(uintptr(unsafe.Pointer(&pIFTable2)))\n \n-\tSSIDs, _ := env.getAllWifiSSID()\n \tnetworks := make([]*Connection, 0)\n \n \tfor i := 0; i < int(pIFTable2.NumEntries); i++ {\n@@ -220,11 +219,13 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \t\t}\n \n \t\tvar connectionType ConnectionType\n+\t\tvar ssid string\n \t\tswitch networkInterface.Type {\n \t\tcase 6:\n \t\t\tconnectionType = ETHERNET\n \t\tcase 71:\n \t\t\tconnectionType = WIFI\n+\t\t\tssid = 
env.getWiFiSSID(networkInterface.InterfaceGUID)\n \t\tcase 237, 234, 244:\n \t\t\tconnectionType = CELLULAR\n \t\t}\n@@ -243,10 +244,7 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \t\t\tName: description, // we want a relatable name, alias isn't that\n \t\t\tTransmitRate: networkInterface.TransmitLinkSpeed,\n \t\t\tReceiveRate: networkInterface.ReceiveLinkSpeed,\n-\t\t}\n-\n-\t\tif SSID, OK := SSIDs[network.Name]; OK {\n-\t\t\tnetwork.SSID = SSID\n+\t\t\tSSID: ssid,\n \t\t}\n \n \t\tnetworks = append(networks, network)\n@@ -322,13 +320,21 @@ type MIB_IF_ROW2 struct { //nolint: revive\n \tOutQLen uint64\n }\n \n-func (env *ShellEnvironment) getAllWifiSSID() (map[string]string, error) {\n+var (\n+\twlanapi = syscall.NewLazyDLL(\"wlanapi.dll\")\n+\thWlanOpenHandle = wlanapi.NewProc(\"WlanOpenHandle\")\n+\thWlanCloseHandle = wlanapi.NewProc(\"WlanCloseHandle\")\n+\thWlanQueryInterface = wlanapi.NewProc(\"WlanQueryInterface\")\n+)\n+\n+func (env *ShellEnvironment) getWiFiSSID(guid windows.GUID) string {\n+\t// Query wifi connection state\n \tvar pdwNegotiatedVersion uint32\n \tvar phClientHandle uint32\n \te, _, err := hWlanOpenHandle.Call(uintptr(uint32(2)), uintptr(unsafe.Pointer(nil)), uintptr(unsafe.Pointer(&pdwNegotiatedVersion)), uintptr(unsafe.Pointer(&phClientHandle)))\n \tif e != 0 {\n \t\tenv.Log(Error, \"getAllWifiSSID\", err.Error())\n-\t\treturn nil, err\n+\t\treturn \"\"\n \t}\n \n \t// defer closing handle\n@@ -336,42 +342,11 @@ func (env *ShellEnvironment) getAllWifiSSID() (map[string]string, error) {\n \t\t_, _, _ = hWlanCloseHandle.Call(uintptr(phClientHandle), uintptr(unsafe.Pointer(nil)))\n \t}()\n \n-\tssid := make(map[string]string)\n-\t// list interfaces\n-\tvar interfaceList *WLAN_INTERFACE_INFO_LIST\n-\te, _, err = hWlanEnumInterfaces.Call(uintptr(phClientHandle), uintptr(unsafe.Pointer(nil)), uintptr(unsafe.Pointer(&interfaceList)))\n-\tif e != 0 {\n-\t\tenv.Log(Error, \"getAllWifiSSID\", err.Error())\n-\t\treturn nil, err\n-\t}\n-\n-\t// use first interface that is connected\n-\tnumberOfInterfaces := int(interfaceList.dwNumberOfItems)\n-\tinfoSize := unsafe.Sizeof(interfaceList.InterfaceInfo[0])\n-\tfor i := 0; i < numberOfInterfaces; i++ {\n-\t\tnetwork := (*WLAN_INTERFACE_INFO)(unsafe.Pointer(uintptr(unsafe.Pointer(&interfaceList.InterfaceInfo[0])) + uintptr(i)*infoSize))\n-\t\tif network.isState == 1 {\n-\t\t\twifiInterface := strings.TrimRight(string(utf16.Decode(network.strInterfaceDescription[:])), \"\\x00\")\n-\t\t\tssid[wifiInterface] = env.getWiFiSSID(network, phClientHandle)\n-\t\t}\n-\t}\n-\treturn ssid, nil\n-}\n-\n-var (\n-\twlanapi = syscall.NewLazyDLL(\"wlanapi.dll\")\n-\thWlanOpenHandle = wlanapi.NewProc(\"WlanOpenHandle\")\n-\thWlanCloseHandle = wlanapi.NewProc(\"WlanCloseHandle\")\n-\thWlanEnumInterfaces = wlanapi.NewProc(\"WlanEnumInterfaces\")\n-\thWlanQueryInterface = wlanapi.NewProc(\"WlanQueryInterface\")\n-)\n-\n-func (env *ShellEnvironment) getWiFiSSID(network *WLAN_INTERFACE_INFO, clientHandle uint32) string {\n-\t// Query wifi connection state\n \tvar dataSize uint16\n \tvar wlanAttr *WLAN_CONNECTION_ATTRIBUTES\n-\te, _, _ := hWlanQueryInterface.Call(uintptr(clientHandle),\n-\t\tuintptr(unsafe.Pointer(&network.InterfaceGuid)),\n+\n+\te, _, _ = hWlanQueryInterface.Call(uintptr(phClientHandle),\n+\t\tuintptr(unsafe.Pointer(&guid)),\n \t\tuintptr(7), // wlan_intf_opcode_current_connection\n \t\tuintptr(unsafe.Pointer(nil)),\n \t\tuintptr(unsafe.Pointer(&dataSize)),\n@@ -389,18 +364,6 @@ func (env *ShellEnvironment) 
getWiFiSSID(network *WLAN_INTERFACE_INFO, clientHan\n \treturn string(ssid.ucSSID[0:ssid.uSSIDLength])\n }\n \n-type WLAN_INTERFACE_INFO_LIST struct { //nolint: revive\n-\tdwNumberOfItems uint32\n-\tdwIndex uint32 //nolint: unused\n-\tInterfaceInfo [256]WLAN_INTERFACE_INFO\n-}\n-\n-type WLAN_INTERFACE_INFO struct { //nolint: revive\n-\tInterfaceGuid syscall.GUID //nolint: revive\n-\tstrInterfaceDescription [256]uint16\n-\tisState uint32\n-}\n-\n type WLAN_CONNECTION_ATTRIBUTES struct { //nolint: revive\n \tisState uint32 //nolint: unused\n \twlanConnectionMode uint32 //nolint: unused\n"]
4
["7ab363f7ba2807b3eb9895e47f4fcd058f43ae5e", "d3c754f09502be979e5dcc79f968b15052590bd0", "9474f58b44a35321e9157ca9890c589a7b3729b2", "8a9a022baa15befc325f87892c6bdae25b35bc33"]
["test", "feat", "fix", "refactor"]
pin version of actionlint used,support document.html,fix the contact icon in the e2e test references #6364,extract _value expr from predicate
["diff --git a/.github/workflows/introspect.yml b/.github/workflows/introspect.yml\nindex b6d9125..82d22a5 100644\n--- a/.github/workflows/introspect.yml\n+++ b/.github/workflows/introspect.yml\n@@ -25,5 +25,5 @@ jobs:\n # From https://github.com/rhysd/actionlint/blob/main/docs/usage.md#use-actionlint-on-github-actions\n - name: Check workflow files\n run: |\n- bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/590d3bd9dde0c91f7a66071d40eb84716526e5a6/scripts/download-actionlint.bash)\n+ bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/590d3bd9dde0c91f7a66071d40eb84716526e5a6/scripts/download-actionlint.bash) 1.6.25\n ./actionlint -color -shellcheck=\"\"\n", "diff --git a/packages/nuxt3/src/builder/builder.ts b/packages/nuxt3/src/builder/builder.ts\nindex a24bd88..ecc22ef 100644\n--- a/packages/nuxt3/src/builder/builder.ts\n+++ b/packages/nuxt3/src/builder/builder.ts\n@@ -3,6 +3,7 @@ import fsExtra from 'fs-extra'\n import { debounce } from 'lodash'\n import { BundleBuilder } from 'src/webpack'\n import { Nuxt } from '../core'\n+import { DeterminedGlobals, determineGlobals } from '../utils'\n import {\n templateData,\n compileTemplates,\n@@ -15,12 +16,14 @@ import Ignore from './ignore'\n \n export class Builder {\n nuxt: Nuxt\n+ globals: DeterminedGlobals\n ignore: Ignore\n- app: NuxtApp\n templates: NuxtTemplate[]\n+ app: NuxtApp\n \n constructor (nuxt) {\n this.nuxt = nuxt\n+ this.globals = determineGlobals(nuxt.options.globalName, nuxt.options.globals)\n this.ignore = new Ignore({\n rootDir: nuxt.options.srcDir,\n ignoreArray: nuxt.options.ignore.concat(\n@@ -32,6 +35,10 @@ export class Builder {\n build () {\n return build(this)\n }\n+\n+ close () {\n+ // TODO: close watchers\n+ }\n }\n \n // Extends VueRouter\ndiff --git a/packages/nuxt3/src/builder/template.ts b/packages/nuxt3/src/builder/template.ts\nindex 63a9115..fe09f16 100644\n--- a/packages/nuxt3/src/builder/template.ts\n+++ b/packages/nuxt3/src/builder/template.ts\n@@ -11,6 +11,7 @@ export interface NuxtTemplate {\n \n export function templateData (builder) {\n return {\n+ globals: builder.globals,\n app: builder.app\n }\n }\ndiff --git a/packages/nuxt3/src/builder/watch.ts b/packages/nuxt3/src/builder/watch.ts\nindex b4d1415..d148fec 100644\n--- a/packages/nuxt3/src/builder/watch.ts\n+++ b/packages/nuxt3/src/builder/watch.ts\n@@ -38,7 +38,8 @@ export function createWatcher (\n return {\n watchAll,\n watch,\n- debug\n+ debug,\n+ close: () => watcher.close()\n }\n }\n \ndiff --git a/packages/nuxt3/src/config/options.ts b/packages/nuxt3/src/config/options.ts\nindex 5aac8ac..6e7f93c 100644\n--- a/packages/nuxt3/src/config/options.ts\n+++ b/packages/nuxt3/src/config/options.ts\n@@ -12,7 +12,7 @@ import { DefaultConfiguration, defaultNuxtConfigFile, getDefaultNuxtConfig } fro\n import { deleteProp, mergeConfigs, setProp, overrideProp, Optional } from './transformers'\n \n interface InputConfiguration {\n- appTemplatePath?: string\n+ documentPath?: string\n layoutTransition?: string | DefaultConfiguration['layoutTransition']\n loading?: true | false | DefaultConfiguration['loading']\n manifest?: {\n@@ -197,13 +197,16 @@ function normalizeConfig (_options: CliConfiguration) {\n .concat(options.extensions))\n \n // If app.html is defined, set the template path to the user template\n- if (options.appTemplatePath === undefined) {\n- options.appTemplatePath = path.resolve(options.buildDir, 'views/app.template.html')\n- if (fs.existsSync(path.join(options.srcDir, 'app.html'))) {\n- options.appTemplatePath = 
path.join(options.srcDir, 'app.html')\n+ if (options.documentPath === undefined) {\n+ options.documentPath = path.resolve(options.buildDir, 'views/document.template.html')\n+ const userDocumentPath = path.join(options.srcDir, 'document.html')\n+ if (fs.existsSync(userDocumentPath)) {\n+ options.documentPath = userDocumentPath\n+ } else {\n+ options.watch.push(userDocumentPath)\n }\n } else {\n- options.appTemplatePath = path.resolve(options.srcDir, options.appTemplatePath)\n+ options.documentPath = path.resolve(options.srcDir, options.documentPath)\n }\n \n overrideProp(options.build, 'publicPath', options.build.publicPath.replace(/([^/])$/, '$1/'))\ndiff --git a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\nindex 3e3ce2d..482bd6b 100644\n--- a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n+++ b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n@@ -96,6 +96,9 @@ export default class SSRRenderer extends BaseRenderer {\n // Call Vue renderer renderToString\n let APP = await this.vueRenderer.renderToString(renderContext)\n \n+ // Wrap with Nuxt id\n+ APP = `<div id=\"${this.serverContext.globals.id}\">${APP}</div>`\n+\n // Call render:done in app\n await renderContext.nuxt.hooks.callHook('vue-renderer:done')\n \ndiff --git a/packages/nuxt3/src/webpack/configs/client.ts b/packages/nuxt3/src/webpack/configs/client.ts\nindex a257948..4fb35e0 100644\n--- a/packages/nuxt3/src/webpack/configs/client.ts\n+++ b/packages/nuxt3/src/webpack/configs/client.ts\n@@ -94,7 +94,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.ssr.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: false // Resources will be injected using bundleRenderer\n })\n@@ -104,7 +104,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.spa.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: true\n })\n", "diff --git a/ionic/components/toolbar/test/colors/main.html b/ionic/components/toolbar/test/colors/main.html\nindex 24e48ca..73fe306 100644\n--- a/ionic/components/toolbar/test/colors/main.html\n+++ b/ionic/components/toolbar/test/colors/main.html\n@@ -9,8 +9,8 @@\n <ion-icon name=\"menu\"></ion-icon>\n </button>\n <ion-buttons start>\n- <button>\n- <ion-icon ios=\"contact\"></ion-icon>\n+ <button showWhen=\"ios\">\n+ <ion-icon name=\"contact\"></ion-icon>\n </button>\n <button>\n <ion-icon name=\"search\"></ion-icon>\n@@ -27,8 +27,8 @@\n \n <ion-toolbar primary>\n <ion-buttons start>\n- <button>\n- <ion-icon ios=\"contact\"></ion-icon>\n+ <button showWhen=\"ios\">\n+ <ion-icon name=\"contact\"></ion-icon>\n </button>\n <button>\n <ion-icon name=\"search\"></ion-icon>\n@@ -45,8 +45,8 @@\n \n <ion-toolbar primary>\n <ion-buttons start>\n- <button class=\"activated\">\n- <ion-icon ios=\"contact\"></ion-icon>\n+ <button cla showWhen=\"ios\"ss=\"activated\">\n+ <ion-icon name=\"contact\"></ion-icon>\n </button>\n <button class=\"activated\">\n <ion-icon name=\"search\"></ion-icon>\n", "diff --git a/predicate/src/delete_predicate.rs b/predicate/src/delete_predicate.rs\nindex 02e679a..6368df3 100644\n--- a/predicate/src/delete_predicate.rs\n+++ b/predicate/src/delete_predicate.rs\n@@ -120,6 +120,7 @@ impl From<DeletePredicate> for crate::predicate::Predicate {\n 
partition_key: None,\n range: Some(pred.range),\n exprs: pred.exprs.into_iter().map(|expr| expr.into()).collect(),\n+ value_expr: vec![],\n }\n }\n }\ndiff --git a/predicate/src/predicate.rs b/predicate/src/predicate.rs\nindex d7f3b62..2aa8fdf 100644\n--- a/predicate/src/predicate.rs\n+++ b/predicate/src/predicate.rs\n@@ -11,7 +11,7 @@ use std::{\n use data_types::timestamp::TimestampRange;\n use datafusion::{\n error::DataFusionError,\n- logical_plan::{col, lit_timestamp_nano, Expr, Operator},\n+ logical_plan::{col, lit_timestamp_nano, Column, Expr, Operator},\n optimizer::utils,\n };\n use datafusion_util::{make_range_expr, AndExprBuilder};\n@@ -26,6 +26,7 @@ pub const EMPTY_PREDICATE: Predicate = Predicate {\n exprs: vec![],\n range: None,\n partition_key: None,\n+ value_expr: vec![],\n };\n \n #[derive(Debug, Clone, Copy)]\n@@ -72,6 +73,11 @@ pub struct Predicate {\n /// these expressions should be returned. Other rows are excluded\n /// from the results.\n pub exprs: Vec<Expr>,\n+\n+ /// Optional arbitrary predicates on the special `_value` column. These\n+ /// expressions are applied to `field_columns` projections in the form of\n+ /// `CASE` statement conditions.\n+ pub value_expr: Vec<BinaryExpr>,\n }\n \n impl Predicate {\n@@ -469,6 +475,14 @@ impl PredicateBuilder {\n }\n }\n \n+// A representation of the `BinaryExpr` variant of a Datafusion expression.\n+#[derive(Clone, Debug, PartialEq, PartialOrd)]\n+pub struct BinaryExpr {\n+ pub left: Column,\n+ pub op: Operator,\n+ pub right: Expr,\n+}\n+\n #[cfg(test)]\n mod tests {\n use super::*;\ndiff --git a/query/src/frontend/influxrpc.rs b/query/src/frontend/influxrpc.rs\nindex 5ac7a2e..70c43f2 100644\n--- a/query/src/frontend/influxrpc.rs\n+++ b/query/src/frontend/influxrpc.rs\n@@ -9,8 +9,7 @@ use data_types::chunk_metadata::ChunkId;\n use datafusion::{\n error::{DataFusionError, Result as DatafusionResult},\n logical_plan::{\n- binary_expr, lit, Column, DFSchemaRef, Expr, ExprRewriter, LogicalPlan, LogicalPlanBuilder,\n- Operator,\n+ lit, Column, DFSchemaRef, Expr, ExprRewriter, LogicalPlan, LogicalPlanBuilder, Operator,\n },\n optimizer::utils::expr_to_columns,\n prelude::col,\n@@ -20,7 +19,7 @@ use datafusion_util::AsExpr;\n \n use hashbrown::{HashMap, HashSet};\n use observability_deps::tracing::{debug, trace};\n-use predicate::predicate::{Predicate, PredicateMatch};\n+use predicate::predicate::{BinaryExpr, Predicate, PredicateMatch};\n use schema::selection::Selection;\n use schema::{InfluxColumnType, Schema, TIME_COLUMN_NAME};\n use snafu::{ensure, OptionExt, ResultExt, Snafu};\n@@ -243,7 +242,6 @@ impl InfluxRpcPlanner {\n // and which chunks needs full plan and group them into their table\n for chunk in database.chunks(normalizer.unnormalized()) {\n let table_name = chunk.table_name();\n- let schema = chunk.schema();\n \n // Table is already in the returned table list, no longer needs to discover it from other chunks\n if builder.contains_meta_data_table(table_name.to_string()) {\n@@ -260,7 +258,7 @@ impl InfluxRpcPlanner {\n } else {\n // See if we can have enough info from the chunk's meta data to answer\n // that this table participates in the request\n- let predicate = normalizer.normalized(table_name, schema);\n+ let predicate = normalizer.normalized(table_name);\n //\n // Try and apply the predicate using only metadata\n let pred_result = chunk\n@@ -346,7 +344,7 @@ impl InfluxRpcPlanner {\n let mut do_full_plan = chunk.has_delete_predicates();\n \n let table_name = chunk.table_name();\n- let predicate = 
normalizer.normalized(table_name, chunk.schema());\n+ let predicate = normalizer.normalized(table_name);\n \n // Try and apply the predicate using only metadata\n let pred_result = chunk\n@@ -474,7 +472,7 @@ impl InfluxRpcPlanner {\n let mut do_full_plan = chunk.has_delete_predicates();\n \n let table_name = chunk.table_name();\n- let predicate = normalizer.normalized(table_name, chunk.schema());\n+ let predicate = normalizer.normalized(table_name);\n \n // Try and apply the predicate using only metadata\n let pred_result = chunk\n@@ -821,7 +819,7 @@ impl InfluxRpcPlanner {\n {\n let mut table_chunks = BTreeMap::new();\n for chunk in chunks {\n- let predicate = normalizer.normalized(chunk.table_name(), chunk.schema());\n+ let predicate = normalizer.normalized(chunk.table_name());\n // Try and apply the predicate using only metadata\n let pred_result = chunk\n .apply_predicate_to_metadata(&predicate)\n@@ -1040,9 +1038,8 @@ impl InfluxRpcPlanner {\n C: QueryChunk + 'static,\n {\n let table_name = table_name.as_ref();\n- let scan_and_filter =\n- self.scan_and_filter(table_name, Arc::clone(&schema), normalizer, chunks)?;\n- let predicate = normalizer.normalized(table_name, schema);\n+ let scan_and_filter = self.scan_and_filter(table_name, schema, normalizer, chunks)?;\n+ let predicate = normalizer.normalized(table_name);\n \n let TableScanAndFilter {\n plan_builder,\n@@ -1152,9 +1149,8 @@ impl InfluxRpcPlanner {\n C: QueryChunk + 'static,\n {\n let table_name = table_name.into();\n- let scan_and_filter =\n- self.scan_and_filter(&table_name, Arc::clone(&schema), normalizer, chunks)?;\n- let predicate = normalizer.normalized(&table_name, schema);\n+ let scan_and_filter = self.scan_and_filter(&table_name, schema, normalizer, chunks)?;\n+ let predicate = normalizer.normalized(&table_name);\n \n let TableScanAndFilter {\n plan_builder,\n@@ -1263,9 +1259,8 @@ impl InfluxRpcPlanner {\n C: QueryChunk + 'static,\n {\n let table_name = table_name.into();\n- let scan_and_filter =\n- self.scan_and_filter(&table_name, Arc::clone(&schema), normalizer, chunks)?;\n- let predicate = normalizer.normalized(&table_name, schema);\n+ let scan_and_filter = self.scan_and_filter(&table_name, schema, normalizer, chunks)?;\n+ let predicate = normalizer.normalized(&table_name);\n \n let TableScanAndFilter {\n plan_builder,\n@@ -1342,7 +1337,7 @@ impl InfluxRpcPlanner {\n where\n C: QueryChunk + 'static,\n {\n- let predicate = normalizer.normalized(table_name, Arc::clone(&schema));\n+ let predicate = normalizer.normalized(table_name);\n \n // Scan all columns to begin with (DataFusion projection\n // push-down optimization will prune out unneeded columns later)\n@@ -1701,13 +1696,13 @@ impl PredicateNormalizer {\n \n /// Return a reference to a predicate specialized for `table_name` based on\n /// its `schema`.\n- fn normalized(&mut self, table_name: &str, schema: Arc<Schema>) -> Arc<Predicate> {\n+ fn normalized(&mut self, table_name: &str) -> Arc<Predicate> {\n if let Some(normalized_predicate) = self.normalized.get(table_name) {\n return normalized_predicate.inner();\n }\n \n let normalized_predicate =\n- TableNormalizedPredicate::new(table_name, schema, self.unnormalized.clone());\n+ TableNormalizedPredicate::new(table_name, self.unnormalized.clone());\n \n self.normalized\n .entry(table_name.to_string())\n@@ -1752,13 +1747,18 @@ struct TableNormalizedPredicate {\n }\n \n impl TableNormalizedPredicate {\n- fn new(table_name: &str, schema: Arc<Schema>, mut inner: Predicate) -> Self {\n+ fn new(table_name: &str, mut 
inner: Predicate) -> Self {\n let mut field_projections = BTreeSet::new();\n+ let mut field_value_exprs = vec![];\n+\n inner.exprs = inner\n .exprs\n .into_iter()\n .map(|e| rewrite_measurement_references(table_name, e))\n- .map(|e| rewrite_field_value_references(Arc::clone(&schema), e))\n+ // Rewrite any references to `_value = some_value` to literal true values.\n+ // Keeps track of these expressions, which can then be used to\n+ // augment field projections with conditions using `CASE` statements.\n+ .map(|e| rewrite_field_value_references(&mut field_value_exprs, e))\n .map(|e| {\n // Rewrite any references to `_field = a_field_name` with a literal true\n // and keep track of referenced field names to add to the field\n@@ -1766,6 +1766,8 @@ impl TableNormalizedPredicate {\n rewrite_field_column_references(&mut field_projections, e)\n })\n .collect::<Vec<_>>();\n+ // Store any field value (`_value`) expressions on the `Predicate`.\n+ inner.value_expr = field_value_exprs;\n \n if !field_projections.is_empty() {\n match &mut inner.field_columns {\n@@ -1811,23 +1813,19 @@ impl ExprRewriter for MeasurementRewriter<'_> {\n }\n }\n \n-/// Rewrites a predicate on `_value` to a disjunctive set of expressions on each\n-/// distinct field column in the table.\n-///\n-/// For example, the predicate `_value = 1.77` on a table with three field\n-/// columns would be rewritten to:\n-///\n-/// `(field1 = 1.77 OR field2 = 1.77 OR field3 = 1.77)`.\n-fn rewrite_field_value_references(schema: Arc<Schema>, expr: Expr) -> Expr {\n- let mut rewriter = FieldValueRewriter { schema };\n+/// Rewrites an expression on `_value` as a boolean true literal, pushing any\n+/// encountered expressions onto `value_exprs` so they can be moved onto column\n+/// projections.\n+fn rewrite_field_value_references(value_exprs: &mut Vec<BinaryExpr>, expr: Expr) -> Expr {\n+ let mut rewriter = FieldValueRewriter { value_exprs };\n expr.rewrite(&mut rewriter).expect(\"rewrite is infallible\")\n }\n \n-struct FieldValueRewriter {\n- schema: Arc<Schema>,\n+struct FieldValueRewriter<'a> {\n+ value_exprs: &'a mut Vec<BinaryExpr>,\n }\n \n-impl ExprRewriter for FieldValueRewriter {\n+impl<'a> ExprRewriter for FieldValueRewriter<'a> {\n fn mutate(&mut self, expr: Expr) -> DatafusionResult<Expr> {\n Ok(match expr {\n Expr::BinaryExpr {\n@@ -1836,21 +1834,16 @@ impl ExprRewriter for FieldValueRewriter {\n ref right,\n } => {\n if let Expr::Column(inner) = &**left {\n- if inner.name != VALUE_COLUMN_NAME {\n- return Ok(expr); // column name not `_value`.\n+ if inner.name == VALUE_COLUMN_NAME {\n+ self.value_exprs.push(BinaryExpr {\n+ left: inner.to_owned(),\n+ op,\n+ right: right.as_expr(),\n+ });\n+ return Ok(Expr::Literal(ScalarValue::Boolean(Some(true))));\n }\n-\n- // build a disjunctive expression using binary expressions\n- // for each field column and the original expression's\n- // operator and rhs.\n- self.schema\n- .fields_iter()\n- .map(|field| binary_expr(col(field.name()), op, *right.clone()))\n- .reduce(|a, b| a.or(b))\n- .expect(\"at least one field column\")\n- } else {\n- expr\n }\n+ expr\n }\n _ => expr,\n })\n@@ -1918,7 +1911,7 @@ pub fn schema_has_all_expr_columns(schema: &Schema, expr: &Expr) -> bool {\n \n #[cfg(test)]\n mod tests {\n- use datafusion::logical_plan::Operator;\n+ use datafusion::logical_plan::{binary_expr, Operator};\n use schema::builder::SchemaBuilder;\n \n use super::*;\n@@ -1958,56 +1951,57 @@ mod tests {\n \n #[test]\n fn test_field_value_rewriter() {\n- let schema = SchemaBuilder::new()\n- 
.tag(\"t1\")\n- .tag(\"t2\")\n- .field(\"f1\", DataType::Float64)\n- .field(\"f2\", DataType::Float64)\n- .timestamp()\n- .build()\n- .unwrap();\n-\n let mut rewriter = FieldValueRewriter {\n- schema: Arc::new(schema),\n+ value_exprs: &mut vec![],\n };\n \n let cases = vec![\n (\n binary_expr(col(\"f1\"), Operator::Eq, lit(1.82)),\n binary_expr(col(\"f1\"), Operator::Eq, lit(1.82)),\n+ vec![],\n ),\n- (col(\"t2\"), col(\"t2\")),\n+ (col(\"t2\"), col(\"t2\"), vec![]),\n (\n binary_expr(col(VALUE_COLUMN_NAME), Operator::Eq, lit(1.82)),\n- //\n- // _value = 1.82 -> f1 = (1.82 OR f2 = 1.82)\n- //\n- binary_expr(\n- binary_expr(col(\"f1\"), Operator::Eq, lit(1.82)),\n- Operator::Or,\n- binary_expr(col(\"f2\"), Operator::Eq, lit(1.82)),\n- ),\n+ // _value = 1.82 -> true\n+ lit(true),\n+ vec![BinaryExpr {\n+ left: Column {\n+ relation: None,\n+ name: VALUE_COLUMN_NAME.into(),\n+ },\n+ op: Operator::Eq,\n+ right: lit(1.82),\n+ }],\n ),\n ];\n \n- for (input, exp) in cases {\n+ for (input, exp, mut value_exprs) in cases {\n let rewritten = input.rewrite(&mut rewriter).unwrap();\n assert_eq!(rewritten, exp);\n+ assert_eq!(rewriter.value_exprs, &mut value_exprs);\n }\n \n // Test case with single field.\n- let schema = SchemaBuilder::new()\n- .field(\"f1\", DataType::Float64)\n- .timestamp()\n- .build()\n- .unwrap();\n let mut rewriter = FieldValueRewriter {\n- schema: Arc::new(schema),\n+ value_exprs: &mut vec![],\n };\n \n let input = binary_expr(col(VALUE_COLUMN_NAME), Operator::Gt, lit(1.88));\n let rewritten = input.rewrite(&mut rewriter).unwrap();\n- assert_eq!(rewritten, binary_expr(col(\"f1\"), Operator::Gt, lit(1.88)));\n+ assert_eq!(rewritten, lit(true));\n+ assert_eq!(\n+ rewriter.value_exprs,\n+ &mut vec![BinaryExpr {\n+ left: Column {\n+ relation: None,\n+ name: VALUE_COLUMN_NAME.into(),\n+ },\n+ op: Operator::Gt,\n+ right: lit(1.88),\n+ }]\n+ );\n }\n \n #[test]\n"]
4
["b702adc245f679ae20d84de39f0d63b14aabed5d", "09476134eeeb12c025618919ab9a795a680a9b30", "90d4c1d5bcc9f2dce6e1da0cb953f04f46fb1380", "0cb9751b0a1bdd8d2c88b45d4366e760d6b1bbed"]
["ci", "feat", "test", "refactor"]
change tests to depend on BrokerContext,`worktree::encode_to_worktree()` to turn UTf-8 into the worktree encoding.,filters for Rating Signed-off-by: Raju Udava <[email protected]>,update version (v0.6.18)
["diff --git a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\nindex fe4e42d..37c7066 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n@@ -7,20 +7,14 @@\n */\n package io.camunda.zeebe.broker;\n \n-import io.atomix.cluster.AtomixCluster;\n import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupContextImpl;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupProcess;\n-import io.camunda.zeebe.broker.clustering.ClusterServices;\n import io.camunda.zeebe.broker.exporter.repo.ExporterLoadException;\n import io.camunda.zeebe.broker.exporter.repo.ExporterRepository;\n-import io.camunda.zeebe.broker.partitioning.PartitionManager;\n-import io.camunda.zeebe.broker.system.EmbeddedGatewayService;\n import io.camunda.zeebe.broker.system.SystemContext;\n import io.camunda.zeebe.broker.system.configuration.BrokerCfg;\n-import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.broker.system.monitoring.BrokerHealthCheckService;\n-import io.camunda.zeebe.broker.system.monitoring.DiskSpaceUsageMonitor;\n import io.camunda.zeebe.protocol.impl.encoding.BrokerInfo;\n import io.camunda.zeebe.util.LogUtil;\n import io.camunda.zeebe.util.VersionUtil;\n@@ -184,35 +178,15 @@ public final class Broker implements AutoCloseable {\n }\n \n // only used for tests\n- public EmbeddedGatewayService getEmbeddedGatewayService() {\n- return brokerContext.getEmbeddedGatewayService();\n- }\n-\n- public AtomixCluster getAtomixCluster() {\n- return brokerContext.getAtomixCluster();\n- }\n-\n- public ClusterServices getClusterServices() {\n- return brokerContext.getClusterServices();\n- }\n-\n- public DiskSpaceUsageMonitor getDiskSpaceUsageMonitor() {\n- return brokerContext.getDiskSpaceUsageMonitor();\n- }\n-\n- public BrokerAdminService getBrokerAdminService() {\n- return brokerContext.getBrokerAdminService();\n+ public BrokerContext getBrokerContext() {\n+ return brokerContext;\n }\n \n+ // only used for tests\n public SystemContext getSystemContext() {\n return systemContext;\n }\n \n- public PartitionManager getPartitionManager() {\n- return brokerContext.getPartitionManager();\n- }\n- // only used for tests\n-\n /**\n * Temporary helper object. This object is needed during the transition of broker startup/shutdown\n * steps to the new concept. 
Afterwards, the expectation is that this object will merge with the\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\nindex bda5170..1accbc1 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n@@ -45,11 +45,12 @@ public class BrokerSnapshotTest {\n (RaftPartition)\n brokerRule\n .getBroker()\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(PartitionId.from(PartitionManagerImpl.GROUP_NAME, PARTITION_ID));\n journalReader = raftPartition.getServer().openReader();\n- brokerAdminService = brokerRule.getBroker().getBrokerAdminService();\n+ brokerAdminService = brokerRule.getBroker().getBrokerContext().getBrokerAdminService();\n \n final String contactPoint = NetUtil.toSocketAddressString(brokerRule.getGatewayAddress());\n final ZeebeClientBuilder zeebeClientBuilder =\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\nindex e98e7d2..a831bfe 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n@@ -173,11 +173,11 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n }\n \n public ClusterServices getClusterServices() {\n- return broker.getClusterServices();\n+ return broker.getBrokerContext().getClusterServices();\n }\n \n public AtomixCluster getAtomixCluster() {\n- return broker.getAtomixCluster();\n+ return broker.getBrokerContext().getAtomixCluster();\n }\n \n public InetSocketAddress getGatewayAddress() {\n@@ -245,7 +245,8 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient brokerClient = embeddedGatewayService.get().getBrokerClient();\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\nindex 890b596..8561cf1 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n@@ -29,6 +29,7 @@ import io.atomix.utils.net.Address;\n import io.camunda.zeebe.broker.Broker;\n import io.camunda.zeebe.broker.PartitionListener;\n import io.camunda.zeebe.broker.SpringBrokerBridge;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.exporter.stream.ExporterDirectorContext;\n import io.camunda.zeebe.broker.partitioning.PartitionManagerImpl;\n import io.camunda.zeebe.broker.system.SystemContext;\n@@ -602,11 +603,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void stepDown(final Broker broker, final int partitionId) {\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = 
atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == partitionId)\n .map(RaftPartition.class::cast)\n@@ -617,14 +618,14 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void disconnect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).stop().join();\n ((NettyMessagingService) atomix.getMessagingService()).stop().join();\n }\n \n public void connect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).start().join();\n ((NettyMessagingService) atomix.getMessagingService()).start().join();\n@@ -666,11 +667,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n final var broker = brokers.get(expectedLeader);\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == START_PARTITION_ID)\n .map(RaftPartition.class::cast)\n@@ -775,14 +776,15 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void takeSnapshot(final Broker broker) {\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n }\n \n public void triggerAndWaitForSnapshots() {\n // Ensure that the exporter positions are distributed to the followers\n getClock().addTime(ExporterDirectorContext.DEFAULT_DISTRIBUTION_INTERVAL);\n getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::takeSnapshot);\n \n getBrokers()\n@@ -794,7 +796,7 @@ public final class ClusteringRule extends ExternalResource {\n .until(\n () -> {\n // Trigger snapshot again in case snapshot is not already taken\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n return getSnapshot(broker);\n },\n Optional::isPresent));\n@@ -831,7 +833,7 @@ public final class ClusteringRule extends ExternalResource {\n \n private Optional<SnapshotId> getSnapshot(final Broker broker, final int partitionId) {\n \n- final var partitions = broker.getBrokerAdminService().getPartitionStatus();\n+ final var partitions = broker.getBrokerContext().getBrokerAdminService().getPartitionStatus();\n final var partitionStatus = partitions.get(partitionId);\n \n return Optional.ofNullable(partitionStatus)\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\nindex f07961c..d46636b 
100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n@@ -88,6 +88,7 @@ public class DiskSpaceMonitoringFailOverTest {\n () ->\n clusteringRule\n .getBroker(newLeaderId)\n+ .getBrokerContext()\n .getBrokerAdminService()\n .getPartitionStatus()\n .get(1)\n@@ -96,7 +97,7 @@ public class DiskSpaceMonitoringFailOverTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\nindex 0a02a27..6e93cf9 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n@@ -165,7 +165,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -188,7 +188,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\nindex 9cef5a0..a487729 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n@@ -192,7 +192,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -215,7 +216,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n 
new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\nindex 2d1e4f0..58f6f16 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n@@ -48,6 +48,7 @@ public class HealthMonitoringTest {\n final var raftPartition =\n (RaftPartition)\n leader\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\nindex 468f83c..7ff03be 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n@@ -11,6 +11,7 @@ import static org.assertj.core.api.Assertions.assertThat;\n \n import io.atomix.raft.RaftServer.Role;\n import io.camunda.zeebe.broker.Broker;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor.Phase;\n import io.camunda.zeebe.it.clustering.ClusteringRule;\n@@ -48,7 +49,7 @@ public class BrokerAdminServiceClusterTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -61,7 +62,8 @@ public class BrokerAdminServiceClusterTest {\n // when\n final var followerStatus =\n followers.stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .map(BrokerAdminService::getPartitionStatus)\n .map(status -> status.get(1));\n \n@@ -94,7 +96,8 @@ public class BrokerAdminServiceClusterTest {\n \n // then\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(this::assertThatStatusContainsProcessedPositionInSnapshot);\n }\n \n@@ -102,7 +105,8 @@ public class BrokerAdminServiceClusterTest {\n public void shouldPauseAfterLeaderChange() {\n // given\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::pauseStreamProcessing);\n \n // when\n@@ -113,6 +117,7 @@ public class BrokerAdminServiceClusterTest {\n final var newLeaderAdminService =\n clusteringRule\n .getBroker(clusteringRule.getLeaderForPartition(1).getNodeId())\n+ .getBrokerContext()\n .getBrokerAdminService();\n assertStreamProcessorPhase(newLeaderAdminService, Phase.PAUSED);\n }\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\nindex 5160b50..2185329 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n+++ 
b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n@@ -41,7 +41,7 @@ public class BrokerAdminServiceTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -144,7 +144,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PAUSED);\n }\n \n@@ -161,7 +161,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PROCESSING);\n }\n \n@@ -176,7 +176,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.PAUSED);\n }\n \n@@ -193,7 +193,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.EXPORTING);\n }\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\nindex d6c8ab3..4582ad2 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n@@ -30,7 +30,7 @@ public class BrokerAdminServiceWithOutExporterTest {\n // given\n final var leader =\n clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- final var leaderAdminService = leader.getBrokerAdminService();\n+ final var leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n // when there are no exporters configured\n // then\n final var partitionStatus = leaderAdminService.getPartitionStatus().get(1);\ndiff --git a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\nindex 36bc0bf..d332201 100644\n--- a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n+++ b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n@@ -240,7 +240,8 @@ public class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient 
brokerClient = embeddedGatewayService.get().getBrokerClient();\n \n", "diff --git a/gix-filter/src/worktree.rs b/gix-filter/src/worktree.rs\ndeleted file mode 100644\nindex cda7640..0000000\n--- a/gix-filter/src/worktree.rs\n+++ /dev/null\n@@ -1,132 +0,0 @@\n-//! Worktree encodings are powered by the `encoding_rs` crate, which has a narrower focus than the `iconv` library. Thus this implementation\n-//! is inherently more limited but will handle the common cases.\n-//! \n-//! Note that for encoding to legacy formats, [additional normalization steps](https://docs.rs/encoding_rs/0.8.32/encoding_rs/#preparing-text-for-the-encoders)\n-//! can be taken, which we do not yet take unless there is specific examples or problems to solve.\n-\n-use crate::clear_and_set_capacity;\n-use crate::worktree::encode_to_git::RoundTrip;\n-use encoding_rs::DecoderResult;\n-\n-///\n-pub mod encoding {\n- use bstr::BStr;\n- use encoding_rs::Encoding;\n-\n- ///\n- pub mod for_label {\n- use bstr::BString;\n-\n- /// The error returned by [for_label()][super::for_label()].\n- #[derive(Debug, thiserror::Error)]\n- #[allow(missing_docs)]\n- pub enum Error {\n- #[error(\"An encoding named '{name}' is not known\")]\n- Unknown { name: BString },\n- }\n- }\n- /// Try to produce a new `Encoding` for `label` or report an error if it is not known.\n- ///\n- /// ### Deviation\n- ///\n- /// * There is no special handling of UTF-16LE/BE with checks if data contains a BOM or not, like `git` as we don't expect to have\n- /// data available here.\n- /// * Special `-BOM` suffixed versions of `UTF-16` encodings are not supported.\n- pub fn for_label<'a>(label: impl Into<&'a BStr>) -> Result<&'static Encoding, for_label::Error> {\n- let mut label = label.into();\n- if label == \"latin-1\" {\n- label = \"ISO-8859-1\".into();\n- }\n- let enc =\n- Encoding::for_label(label.as_ref()).ok_or_else(|| for_label::Error::Unknown { name: label.into() })?;\n- Ok(enc)\n- }\n-}\n-\n-///\n-pub mod encode_to_git {\n- /// Whether or not to perform round-trip checks.\n- #[derive(Debug, Copy, Clone)]\n- pub enum RoundTrip {\n- /// Assure that we can losslessly convert the UTF-8 result back to the original encoding.\n- Validate,\n- /// Do not check if the encoding is round-trippable.\n- Ignore,\n- }\n-\n- /// The error returned by [`encode_to_git()][super::encode_to_git()].\n- #[derive(Debug, thiserror::Error)]\n- #[allow(missing_docs)]\n- pub enum Error {\n- #[error(\"Cannot convert input of {input_len} bytes to UTF-8 without overflowing\")]\n- Overflow { input_len: usize },\n- #[error(\"The input was malformed and could not be decoded as '{encoding}'\")]\n- Malformed { encoding: &'static str },\n- #[error(\"Encoding from '{src_encoding}' to '{dest_encoding}' and back is not the same\")]\n- RoundTrip {\n- src_encoding: &'static str,\n- dest_encoding: &'static str,\n- },\n- }\n-}\n-\n-/// Decode `src` according to `src_encoding` to `UTF-8` for storage in git.\n-/// Note that the encoding is always applied, there is no conditional even if `src_encoding` already is `UTF-8`.\n-pub fn encode_to_git(\n- src: &[u8],\n- src_encoding: &'static encoding_rs::Encoding,\n- buf: &mut Vec<u8>,\n- round_trip: encode_to_git::RoundTrip,\n-) -> Result<(), encode_to_git::Error> {\n- let mut decoder = src_encoding.new_decoder_with_bom_removal();\n- let buf_len = decoder\n- .max_utf8_buffer_length_without_replacement(src.len())\n- .ok_or_else(|| encode_to_git::Error::Overflow { input_len: src.len() })?;\n- clear_and_set_capacity(buf, buf_len);\n- // SAFETY: 
`clear_and_set_capacity` assure that we have the given `buf_len` allocated, so setting its length is only making available\n- // what is allocated. Later we will truncate to the amount of actually written bytes.\n- #[allow(unsafe_code)]\n- unsafe {\n- buf.set_len(buf_len);\n- }\n- let (res, read, written) = decoder.decode_to_utf8_without_replacement(src, buf, true);\n- match res {\n- DecoderResult::InputEmpty => {\n- assert!(\n- buf_len >= written,\n- \"encoding_rs estimates the maximum amount of bytes written correctly\"\n- );\n- assert_eq!(read, src.len(), \"input buffer should be fully consumed\");\n- // SAFETY: we trust that `encoding_rs` reports this number correctly, and truncate everything else.\n- #[allow(unsafe_code)]\n- unsafe {\n- buf.set_len(written);\n- }\n- }\n- DecoderResult::OutputFull => {\n- unreachable!(\"we assure that the output buffer is big enough as per the encoder's estimate\")\n- }\n- DecoderResult::Malformed(_, _) => {\n- return Err(encode_to_git::Error::Malformed {\n- encoding: src_encoding.name(),\n- })\n- }\n- }\n-\n- match round_trip {\n- RoundTrip::Validate => {\n- // SAFETY: we trust `encoding_rs` to output valid UTF-8 only if we ask it to.\n- #[allow(unsafe_code)]\n- let str = unsafe { std::str::from_utf8_unchecked(&buf) };\n- let (should_equal_src, _actual_encoding, _had_errors) = src_encoding.encode(str);\n- if should_equal_src != src {\n- return Err(encode_to_git::Error::RoundTrip {\n- src_encoding: src_encoding.name(),\n- dest_encoding: \"UTF-8\",\n- });\n- }\n- }\n- RoundTrip::Ignore => {}\n- }\n- Ok(())\n-}\ndiff --git a/gix-filter/src/worktree/encode_to_git.rs b/gix-filter/src/worktree/encode_to_git.rs\nnew file mode 100644\nindex 0000000..da1bbf7\n--- /dev/null\n+++ b/gix-filter/src/worktree/encode_to_git.rs\n@@ -0,0 +1,90 @@\n+/// Whether or not to perform round-trip checks.\n+#[derive(Debug, Copy, Clone)]\n+pub enum RoundTrip {\n+ /// Assure that we can losslessly convert the UTF-8 result back to the original encoding.\n+ Validate,\n+ /// Do not check if the encoding is round-trippable.\n+ Ignore,\n+}\n+\n+/// The error returned by [`encode_to_git()][super::encode_to_git()].\n+#[derive(Debug, thiserror::Error)]\n+#[allow(missing_docs)]\n+pub enum Error {\n+ #[error(\"Cannot convert input of {input_len} bytes to UTF-8 without overflowing\")]\n+ Overflow { input_len: usize },\n+ #[error(\"The input was malformed and could not be decoded as '{encoding}'\")]\n+ Malformed { encoding: &'static str },\n+ #[error(\"Encoding from '{src_encoding}' to '{dest_encoding}' and back is not the same\")]\n+ RoundTrip {\n+ src_encoding: &'static str,\n+ dest_encoding: &'static str,\n+ },\n+}\n+\n+pub(crate) mod function {\n+ use super::{Error, RoundTrip};\n+ use crate::clear_and_set_capacity;\n+ use encoding_rs::DecoderResult;\n+\n+ /// Decode `src` according to `src_encoding` to `UTF-8` for storage in git and place it in `buf`.\n+ /// Note that the encoding is always applied, there is no conditional even if `src_encoding` already is `UTF-8`.\n+ pub fn encode_to_git(\n+ src: &[u8],\n+ src_encoding: &'static encoding_rs::Encoding,\n+ buf: &mut Vec<u8>,\n+ round_trip: RoundTrip,\n+ ) -> Result<(), Error> {\n+ let mut decoder = src_encoding.new_decoder_with_bom_removal();\n+ let buf_len = decoder\n+ .max_utf8_buffer_length_without_replacement(src.len())\n+ .ok_or(Error::Overflow { input_len: src.len() })?;\n+ clear_and_set_capacity(buf, buf_len);\n+ // SAFETY: `clear_and_set_capacity` assure that we have the given `buf_len` allocated, so setting its length is only 
making available\n+ // what is allocated. Later we will truncate to the amount of actually written bytes.\n+ #[allow(unsafe_code)]\n+ unsafe {\n+ buf.set_len(buf_len);\n+ }\n+ let (res, read, written) = decoder.decode_to_utf8_without_replacement(src, buf, true);\n+ match res {\n+ DecoderResult::InputEmpty => {\n+ assert!(\n+ buf_len >= written,\n+ \"encoding_rs estimates the maximum amount of bytes written correctly\"\n+ );\n+ assert_eq!(read, src.len(), \"input buffer should be fully consumed\");\n+ // SAFETY: we trust that `encoding_rs` reports this number correctly, and truncate everything else.\n+ #[allow(unsafe_code)]\n+ unsafe {\n+ buf.set_len(written);\n+ }\n+ }\n+ DecoderResult::OutputFull => {\n+ unreachable!(\"we assure that the output buffer is big enough as per the encoder's estimate\")\n+ }\n+ DecoderResult::Malformed(_, _) => {\n+ return Err(Error::Malformed {\n+ encoding: src_encoding.name(),\n+ })\n+ }\n+ }\n+\n+ match round_trip {\n+ RoundTrip::Validate => {\n+ // SAFETY: we trust `encoding_rs` to output valid UTF-8 only if we ask it to.\n+ #[allow(unsafe_code)]\n+ let str = unsafe { std::str::from_utf8_unchecked(buf) };\n+ let (should_equal_src, _actual_encoding, _had_errors) = src_encoding.encode(str);\n+ if should_equal_src != src {\n+ return Err(Error::RoundTrip {\n+ src_encoding: src_encoding.name(),\n+ dest_encoding: \"UTF-8\",\n+ });\n+ }\n+ }\n+ RoundTrip::Ignore => {}\n+ }\n+ Ok(())\n+ }\n+}\ndiff --git a/gix-filter/src/worktree/encode_to_worktree.rs b/gix-filter/src/worktree/encode_to_worktree.rs\nnew file mode 100644\nindex 0000000..0a53419\n--- /dev/null\n+++ b/gix-filter/src/worktree/encode_to_worktree.rs\n@@ -0,0 +1,69 @@\n+/// The error returned by [`encode_to_worktree()][super::encode_to_worktree()].\n+#[derive(Debug, thiserror::Error)]\n+#[allow(missing_docs)]\n+pub enum Error {\n+ #[error(\"Cannot convert input of {input_len} UTF-8 bytes to target encoding without overflowing\")]\n+ Overflow { input_len: usize },\n+ #[error(\"Input was not UTF-8 encoded\")]\n+ InputAsUtf8(#[from] std::str::Utf8Error),\n+ #[error(\"The character '{character}' could not be mapped to the {worktree_encoding}\")]\n+ Unmappable {\n+ character: char,\n+ worktree_encoding: &'static str,\n+ },\n+}\n+\n+pub(crate) mod function {\n+ use super::Error;\n+ use crate::clear_and_set_capacity;\n+ use encoding_rs::EncoderResult;\n+\n+ /// Encode `src_utf8`, which is assumed to be UTF-8 encoded, according to `worktree_encoding` for placement in the working directory,\n+ /// and write it to `buf`, possibly resizing it.\n+ /// Note that the encoding is always applied, there is no conditional even if `worktree_encoding` and the `src` encoding are the same.\n+ pub fn encode_to_worktree(\n+ src_utf8: &[u8],\n+ worktree_encoding: &'static encoding_rs::Encoding,\n+ buf: &mut Vec<u8>,\n+ ) -> Result<(), Error> {\n+ let mut encoder = worktree_encoding.new_encoder();\n+ let buf_len = encoder\n+ .max_buffer_length_from_utf8_if_no_unmappables(src_utf8.len())\n+ .ok_or(Error::Overflow {\n+ input_len: src_utf8.len(),\n+ })?;\n+ clear_and_set_capacity(buf, buf_len);\n+ // SAFETY: `clear_and_set_capacity` assure that we have the given `buf_len` allocated, so setting its length is only making available\n+ // what is allocated. 
Later we will truncate to the amount of actually written bytes.\n+ #[allow(unsafe_code)]\n+ unsafe {\n+ buf.set_len(buf_len);\n+ }\n+ let src = std::str::from_utf8(src_utf8)?;\n+ let (res, read, written) = encoder.encode_from_utf8_without_replacement(src, buf, true);\n+ match res {\n+ EncoderResult::InputEmpty => {\n+ assert!(\n+ buf_len >= written,\n+ \"encoding_rs estimates the maximum amount of bytes written correctly\"\n+ );\n+ assert_eq!(read, src_utf8.len(), \"input buffer should be fully consumed\");\n+ // SAFETY: we trust that `encoding_rs` reports this number correctly, and truncate everything else.\n+ #[allow(unsafe_code)]\n+ unsafe {\n+ buf.set_len(written);\n+ }\n+ }\n+ EncoderResult::OutputFull => {\n+ unreachable!(\"we assure that the output buffer is big enough as per the encoder's estimate\")\n+ }\n+ EncoderResult::Unmappable(c) => {\n+ return Err(Error::Unmappable {\n+ worktree_encoding: worktree_encoding.name(),\n+ character: c,\n+ })\n+ }\n+ }\n+ Ok(())\n+ }\n+}\ndiff --git a/gix-filter/src/worktree/encoding.rs b/gix-filter/src/worktree/encoding.rs\nnew file mode 100644\nindex 0000000..0b75adc\n--- /dev/null\n+++ b/gix-filter/src/worktree/encoding.rs\n@@ -0,0 +1,31 @@\n+use bstr::BStr;\n+use encoding_rs::Encoding;\n+\n+///\n+pub mod for_label {\n+ use bstr::BString;\n+\n+ /// The error returned by [for_label()][super::for_label()].\n+ #[derive(Debug, thiserror::Error)]\n+ #[allow(missing_docs)]\n+ pub enum Error {\n+ #[error(\"An encoding named '{name}' is not known\")]\n+ Unknown { name: BString },\n+ }\n+}\n+\n+/// Try to produce a new `Encoding` for `label` or report an error if it is not known.\n+///\n+/// ### Deviation\n+///\n+/// * There is no special handling of UTF-16LE/BE with checks if data contains a BOM or not, like `git` as we don't expect to have\n+/// data available here.\n+/// * Special `-BOM` suffixed versions of `UTF-16` encodings are not supported.\n+pub fn for_label<'a>(label: impl Into<&'a BStr>) -> Result<&'static Encoding, for_label::Error> {\n+ let mut label = label.into();\n+ if label == \"latin-1\" {\n+ label = \"ISO-8859-1\".into();\n+ }\n+ let enc = Encoding::for_label(label.as_ref()).ok_or_else(|| for_label::Error::Unknown { name: label.into() })?;\n+ Ok(enc)\n+}\ndiff --git a/gix-filter/src/worktree/mod.rs b/gix-filter/src/worktree/mod.rs\nnew file mode 100644\nindex 0000000..3b13ea4\n--- /dev/null\n+++ b/gix-filter/src/worktree/mod.rs\n@@ -0,0 +1,16 @@\n+//! Worktree encodings are powered by the `encoding_rs` crate, which has a narrower focus than the `iconv` library. Thus this implementation\n+//! is inherently more limited but will handle the common cases.\n+//! \n+//! Note that for encoding to legacy formats, [additional normalization steps](https://docs.rs/encoding_rs/0.8.32/encoding_rs/#preparing-text-for-the-encoders)\n+//! 
can be taken, which we do not yet take unless there is specific examples or problems to solve.\n+\n+///\n+pub mod encoding;\n+\n+///\n+pub mod encode_to_git;\n+pub use encode_to_git::function::encode_to_git;\n+\n+///\n+pub mod encode_to_worktree;\n+pub use encode_to_worktree::function::encode_to_worktree;\ndiff --git a/gix-filter/tests/worktree/mod.rs b/gix-filter/tests/worktree/mod.rs\nindex cc2c6f1..1eb1a8e 100644\n--- a/gix-filter/tests/worktree/mod.rs\n+++ b/gix-filter/tests/worktree/mod.rs\n@@ -74,13 +74,28 @@ mod encode_to_git {\n let input = &b\"hello\"[..];\n for round_trip in [RoundTrip::Ignore, RoundTrip::Validate] {\n let mut buf = Vec::new();\n- worktree::encode_to_git(input, encoding(\"UTF-8\"), &mut buf, round_trip)?;\n+ worktree::encode_to_git(input, encoding_rs::UTF_8, &mut buf, round_trip)?;\n assert_eq!(buf.as_bstr(), input)\n }\n Ok(())\n }\n+}\n+\n+mod encode_to_worktree {\n+ use bstr::ByteSlice;\n+ use gix_filter::worktree;\n+ use gix_filter::worktree::encode_to_git::RoundTrip;\n \n- fn encoding(label: &str) -> &'static encoding_rs::Encoding {\n- worktree::encoding::for_label(label).expect(\"encoding is valid and known at compile time\")\n+ #[test]\n+ fn shift_jis() -> crate::Result {\n+ let input = \"\u30cf\u30ed\u30fc\u30ef\u30fc\u30eb\u30c9\";\n+ let mut buf = Vec::new();\n+ worktree::encode_to_worktree(input.as_bytes(), encoding_rs::SHIFT_JIS, &mut buf)?;\n+\n+ let mut re_encoded = Vec::new();\n+ worktree::encode_to_git(&buf, encoding_rs::SHIFT_JIS, &mut re_encoded, RoundTrip::Validate)?;\n+\n+ assert_eq!(re_encoded.as_bstr(), input, \"this should be round-trippable too\");\n+ Ok(())\n }\n }\n", "diff --git a/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts b/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts\nindex 1a626fa..b82e7f6 100644\n--- a/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts\n+++ b/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts\n@@ -1,6 +1,7 @@\n import { expect } from '@playwright/test';\n import BasePage from '../../../Base';\n import { ToolbarPage } from './index';\n+import { UITypes } from 'nocodb-sdk';\n \n export class ToolbarFilterPage extends BasePage {\n readonly toolbar: ToolbarPage;\n@@ -33,11 +34,13 @@ export class ToolbarFilterPage extends BasePage {\n opType,\n value,\n isLocallySaved,\n+ dataType,\n }: {\n columnTitle: string;\n opType: string;\n value?: string;\n isLocallySaved: boolean;\n+ dataType?: string;\n }) {\n await this.get().locator(`button:has-text(\"Add Filter\")`).first().click();\n \n@@ -86,14 +89,25 @@ export class ToolbarFilterPage extends BasePage {\n \n // if value field was provided, fill it\n if (value) {\n- const fillFilter = this.rootPage.locator('.nc-filter-value-select > input').last().fill(value);\n- await this.waitForResponse({\n- uiAction: fillFilter,\n- httpMethodsToMatch: ['GET'],\n- requestUrlPathToMatch: isLocallySaved ? `/api/v1/db/public/` : `/api/v1/db/data/noco/`,\n- });\n- await this.toolbar.parent.dashboard.waitForLoaderToDisappear();\n- await this.toolbar.parent.waitLoading();\n+ let fillFilter: any = null;\n+ switch (dataType) {\n+ case UITypes.Rating:\n+ await this.get('.nc-filter-value-select')\n+ .locator('.ant-rate-star > div')\n+ .nth(parseInt(value) - 1)\n+ .click();\n+ break;\n+ default:\n+ fillFilter = this.rootPage.locator('.nc-filter-value-select > input').last().fill(value);\n+ await this.waitForResponse({\n+ uiAction: fillFilter,\n+ httpMethodsToMatch: ['GET'],\n+ requestUrlPathToMatch: isLocallySaved ? 
`/api/v1/db/public/` : `/api/v1/db/data/noco/`,\n+ });\n+ await this.toolbar.parent.dashboard.waitForLoaderToDisappear();\n+ await this.toolbar.parent.waitLoading();\n+ break;\n+ }\n }\n }\n \ndiff --git a/tests/playwright/tests/filters.spec.ts b/tests/playwright/tests/filters.spec.ts\nindex 774a70a..48d949a 100644\n--- a/tests/playwright/tests/filters.spec.ts\n+++ b/tests/playwright/tests/filters.spec.ts\n@@ -36,7 +36,13 @@ async function validateRowArray(param) {\n // }\n }\n \n-async function verifyFilter(param: { column: string; opType: string; value?: string; result: { rowCount: number } }) {\n+async function verifyFilter(param: {\n+ column: string;\n+ opType: string;\n+ value?: string;\n+ result: { rowCount: number };\n+ dataType?: string;\n+}) {\n // if opType was included in skip list, skip it\n if (skipList[param.column]?.includes(param.opType)) {\n return;\n@@ -48,6 +54,7 @@ async function verifyFilter(param: { column: string; opType: string; value?: str\n opType: param.opType,\n value: param.value,\n isLocallySaved: false,\n+ dataType: param?.dataType,\n });\n await toolbar.clickFilter();\n \n@@ -414,4 +421,74 @@ test.describe('Filter Tests: Numerical', () => {\n });\n }\n });\n+\n+ test('Filter: Rating', async () => {\n+ // close 'Team & Auth' tab\n+ await dashboard.closeTab({ title: 'Team & Auth' });\n+ await dashboard.treeView.openTable({ title: 'numberBased' });\n+ const dataType = 'Rating';\n+\n+ const filterList = [\n+ {\n+ op: '=',\n+ value: '3',\n+ rowCount: records.list.filter(r => r[dataType] === 3).length,\n+ },\n+ {\n+ op: '!=',\n+ value: '3',\n+ rowCount: records.list.filter(r => r[dataType] !== 3).length,\n+ },\n+ {\n+ op: 'is null',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] === null).length,\n+ },\n+ {\n+ op: 'is not null',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] !== null).length,\n+ },\n+ {\n+ op: 'is blank',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] === null).length,\n+ },\n+ {\n+ op: 'is not blank',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] !== null).length,\n+ },\n+ {\n+ op: '>',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] > 2 && r[dataType] != null).length,\n+ },\n+ {\n+ op: '>=',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] >= 2 && r[dataType] != null).length,\n+ },\n+ {\n+ op: '<',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] < 2 && r[dataType] != null).length,\n+ },\n+ {\n+ op: '<=',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] <= 2 && r[dataType] != null).length,\n+ },\n+ ];\n+\n+ for (let i = 0; i < filterList.length; i++) {\n+ await verifyFilter({\n+ column: dataType,\n+ opType: filterList[i].op,\n+ value: filterList[i].value,\n+ result: { rowCount: filterList[i].rowCount },\n+ dataType: dataType,\n+ });\n+ }\n+ });\n });\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex c32d8b4..599790e 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n 
\"crossterm\",\n@@ -125,7 +125,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -133,7 +133,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex baaa0ac..5082cd3 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n experimental = [\"erg_common/experimental\", \"erg_parser/experimental\", \"erg_compiler/experimental\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.30-nightly.2\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.18\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.18\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.18\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.30\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 3efbf4e..9f902fa 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n"]
4
["e52a6201093f273add4903dd5f4e55a63539386d", "d1fed3e9907d0a9e3fe45dbfe2ff27bd10b3e1f4", "de88de81551d3e2619444a25a68170c9ed35a9b5", "bb3e3d9b96e435c3b92fc208bca93d1ad7e1ad50"]
["refactor", "feat", "test", "build"]
update CI images from docker buster to bullseye This will break `perf_image` until the new CI image is built due to the newly required `--all-tags` parameter to `docker push` that isn't available for the docker version we run on buster.,skip if related view/hook/column of a filter is not found Signed-off-by: Pranav C <[email protected]>,update dependencies,convert to record
["diff --git a/.circleci/config.yml b/.circleci/config.yml\nindex f8a53ba..c378c7e 100644\n--- a/.circleci/config.yml\n+++ b/.circleci/config.yml\n@@ -336,7 +336,7 @@ jobs:\n # Disabling for now, and tracked further investigations\n # in https://github.com/influxdata/k8s-idpe/issues/3038\n docker_layer_caching: false\n- version: 19.03.14\n+ version: 20.10.7\n - run: |\n sudo apt-get update\n sudo apt-get install -y docker.io\n@@ -355,7 +355,7 @@ jobs:\n BRANCH=$(git rev-parse --abbrev-ref HEAD | tr '/' '.')\n COMMIT_SHA=$(git rev-parse --short HEAD)\n docker build -t quay.io/influxdb/iox:$COMMIT_SHA -t quay.io/influxdb/iox:main -f docker/Dockerfile.iox .\n- docker push quay.io/influxdb/iox\n+ docker push --all-tags quay.io/influxdb/iox\n echo \"export COMMIT_SHA=${COMMIT_SHA}\" >> $BASH_ENV\n - run:\n name: Deploy tags\ndiff --git a/Dockerfile b/Dockerfile\nindex 8c23ea2..1df1fd2 100644\n--- a/Dockerfile\n+++ b/Dockerfile\n@@ -17,7 +17,7 @@ RUN \\\n cp /influxdb_iox/target/release/influxdb_iox /root/influxdb_iox && \\\n du -cshx /usr/local/cargo/registry /usr/local/cargo/git /influxdb_iox/target\n \n-FROM debian:buster-slim\n+FROM debian:bullseye-slim\n \n RUN apt-get update \\\n && apt-get install -y libssl1.1 libgcc1 libc6 ca-certificates --no-install-recommends \\\ndiff --git a/docker/Dockerfile.ci b/docker/Dockerfile.ci\nindex db0a8ca..cf9cd15 100644\n--- a/docker/Dockerfile.ci\n+++ b/docker/Dockerfile.ci\n@@ -12,7 +12,7 @@\n \n ARG RUST_VERSION\n # Build actual image used for CI pipeline\n-FROM rust:${RUST_VERSION}-slim-buster\n+FROM rust:${RUST_VERSION}-slim-bullseye\n \n # When https://github.com/rust-lang/rustup/issues/2686 is fixed, run the command added that\n # will install everything in rust-toolchain.toml here so that components are in the container\n@@ -42,7 +42,7 @@ COPY docker/redpanda.gpg /tmp/redpanda.gpg\n # Generated from https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/cfg/setup/bash.deb.sh\n RUN apt-key add /tmp/redpanda.gpg \\\n && rm /tmp/redpanda.gpg \\\n- && curl ${CURL_FLAGS} \"https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/config.deb.txt?distro=debian&codename=buster&version=10&arch=x86_64\" \\\n+ && curl ${CURL_FLAGS} \"https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/config.deb.txt?distro=debian&codename=bullseye&version=10&arch=x86_64\" \\\n > /etc/apt/sources.list.d/vectorized-redpanda.list \\\n && apt-get update \\\n && apt-get install -y redpanda \\\ndiff --git a/docker/Dockerfile.iox b/docker/Dockerfile.iox\nindex 42414db..ae1f38e 100644\n--- a/docker/Dockerfile.iox\n+++ b/docker/Dockerfile.iox\n@@ -1,7 +1,7 @@\n ###\n # Dockerfile used for deploying IOx\n ##\n-FROM debian:buster-slim\n+FROM debian:bullseye-slim\n \n RUN apt-get update \\\n && apt-get install -y libssl1.1 libgcc1 libc6 ca-certificates gettext-base --no-install-recommends \\\n", "diff --git a/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts b/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\nindex 1515f88..6c250bd 100644\n--- a/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\n+++ b/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\n@@ -21,7 +21,13 @@ export default async function ({ ncMeta }: NcUpgraderCtx) {\n } else {\n continue;\n }\n- if (filter.project_id != model.project_id) {\n+\n+ // skip if related model is not found\n+ if (!model) {\n+ continue;\n+ }\n+\n+ if (filter.project_id !== model.project_id) {\n await ncMeta.metaUpdate(\n null,\n null,\n", "diff --git a/example/exp.json b/example/exp.json\nindex 
ea820f9..b838093 100644\n--- a/example/exp.json\n+++ b/example/exp.json\n@@ -1,8 +1,8 @@\n {\n- \"name\": \"react-native-paper-example\",\n+ \"name\": \"React Native Paper Example\",\n \"description\": \"Example for React Native Paper\",\n \"slug\": \"react-native-paper-example\",\n- \"sdkVersion\": \"10.0.0\",\n+ \"sdkVersion\": \"11.0.0\",\n \"version\": \"1.0.0\",\n \"orientation\": \"portrait\",\n \"primaryColor\": \"#cccccc\",\ndiff --git a/example/package.json b/example/package.json\nindex c4d049a..7e2baeb 100644\n--- a/example/package.json\n+++ b/example/package.json\n@@ -6,10 +6,10 @@\n \"private\": true,\n \"main\": \"main.js\",\n \"dependencies\": {\n- \"@exponent/ex-navigation\": \"^1.7.0\",\n- \"exponent\": \"^10.0.4\",\n+ \"@exponent/ex-navigation\": \"^2.0.0\",\n+ \"exponent\": \"^11.0.2\",\n \"react\": \"~15.3.2\",\n- \"react-native\": \"github:exponentjs/react-native#sdk-10.1.2\",\n+ \"react-native\": \"github:exponentjs/react-native#sdk-11.0.3\",\n \"react-native-paper\": \"file:../\",\n \"react-native-vector-icons\": \"git+https://github.com/exponentjs/react-native-vector-icons.git\"\n }\ndiff --git a/package.json b/package.json\nindex 65afbbc..326ab48 100644\n--- a/package.json\n+++ b/package.json\n@@ -27,17 +27,17 @@\n \"react-native-vector-icons\": \"*\"\n },\n \"devDependencies\": {\n- \"babel-eslint\": \"^7.0.0\",\n- \"eslint\": \"^3.8.1\",\n+ \"babel-eslint\": \"^7.1.0\",\n+ \"eslint\": \"^3.9.1\",\n \"eslint-plugin-babel\": \"^3.3.0\",\n- \"eslint-plugin-import\": \"^2.0.1\",\n- \"eslint-plugin-react\": \"^6.4.1\",\n+ \"eslint-plugin-import\": \"^2.2.0\",\n+ \"eslint-plugin-react\": \"^6.6.0\",\n \"eslint-plugin-react-native\": \"^2.0.0\",\n \"flow-bin\": \"^0.32.0\",\n \"react\": \"latest\",\n \"react-native\": \"latest\"\n },\n \"dependencies\": {\n- \"color\": \"^0.11.3\"\n+ \"color\": \"^0.11.4\"\n }\n }\n", "diff --git a/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java b/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\nindex cc998c6..65c8550 100755\n--- a/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\n@@ -167,13 +167,8 @@ public final class ExporterDirectorDistributionTest {\n * <p>This makes sure that even if we miss one export position event, we distribute the event\n * later again, which makes tests less flaky.\n */\n- private static final class ClockShifter implements ConditionEvaluationListener<Void> {\n-\n- private final ControlledActorClock clock;\n-\n- public ClockShifter(final ControlledActorClock clock) {\n- this.clock = clock;\n- }\n+ private record ClockShifter(ControlledActorClock clock)\n+ implements ConditionEvaluationListener<Void> {\n \n @Override\n public void conditionEvaluated(final EvaluatedCondition<Void> condition) {\n"]
4
["640cd88df3069a97d8244398414338dd317c5470", "ab1e60a97c6d5c688dacbd23bca40cb8f20c4ac3", "ecc481f9f501aa34b41e06e7bbdde6e79f8ca1bb", "3346331a963766c8193170fb130adad2e658ada2"]
["ci", "fix", "build", "refactor"]
fix deploy,update version (v0.6.18),licensing,add !important to override paragraphs in items
["diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml\nindex 3830f4c..3b14ee5 100644\n--- a/.github/workflows/deploy.yaml\n+++ b/.github/workflows/deploy.yaml\n@@ -67,7 +67,7 @@ jobs:\n run: aws s3 cp .next/static s3://cdn.rs.school/_next/static/ --recursive --cache-control \"public,max-age=15552000,immutable\"\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -117,7 +117,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -167,7 +167,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex c32d8b4..599790e 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -125,7 +125,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -133,7 +133,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex baaa0ac..5082cd3 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n experimental = [\"erg_common/experimental\", \"erg_parser/experimental\", \"erg_compiler/experimental\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.30-nightly.2\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.18\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.18\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.18\", path = 
\"./crates/erg_compiler\" }\n+els = { version = \"0.1.30\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 3efbf4e..9f902fa 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n", "diff --git a/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java b/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\nindex a4aee6b..bb523fa 100644\n--- a/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\n+++ b/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\n@@ -1,3 +1,18 @@\n+/*\n+ * Copyright \u00a9 2020 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n package io.atomix.cluster.messaging.impl;\n \n import static org.assertj.core.api.Assertions.assertThat;\n", "diff --git a/packages/core/src/components/text/text.ios.scss b/packages/core/src/components/text/text.ios.scss\nindex a3c58e2..2a020ab 100644\n--- a/packages/core/src/components/text/text.ios.scss\n+++ b/packages/core/src/components/text/text.ios.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-ios) {\n \n .text-ios-#{$color-name},\n- .text-ios-#{$color-name} a {\n- color: $color-base;\n+ .text-ios-#{$color-name} a,\n+ .text-ios-#{$color-name} p {\n+ color: $color-base !important\n }\n \n }\ndiff --git a/packages/core/src/components/text/text.md.scss b/packages/core/src/components/text/text.md.scss\nindex b397acb..050af1a 100644\n--- a/packages/core/src/components/text/text.md.scss\n+++ b/packages/core/src/components/text/text.md.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-md) {\n \n .text-md-#{$color-name},\n- .text-md-#{$color-name} a {\n- color: $color-base;\n+ .text-md-#{$color-name} a,\n+ .text-md-#{$color-name} p {\n+ color: $color-base !important;\n }\n \n }\n"]
4
["7785be09053049b30cf41b420c59f051cd0129fc", "bb3e3d9b96e435c3b92fc208bca93d1ad7e1ad50", "cbe62140ce219da84772e21e7cfb4b5c2a25c1b8", "7ab363f7ba2807b3eb9895e47f4fcd058f43ae5e"]
["ci", "build", "docs", "test"]
Handle different events.,typo,temporarily do not run "verify-ffmpeg.py" on Mac CI (#14986),add canonical `_name` to edge packages
["diff --git a/src/notebook/epics/kernel-launch.js b/src/notebook/epics/kernel-launch.js\nindex 9075d7c..9f16e67 100644\n--- a/src/notebook/epics/kernel-launch.js\n+++ b/src/notebook/epics/kernel-launch.js\n@@ -113,6 +113,12 @@ export function newKernelObservable(kernelSpec: KernelInfo, cwd: string) {\n observer.error({ type: 'ERROR', payload: error, err: true });\n observer.complete();\n });\n+ spawn.on('exit', () => {\n+ observer.complete();\n+ });\n+ spawn.on('disconnect', () => {\n+ observer.complete();\n+ });\n });\n });\n }\n", "diff --git a/README.md b/README.md\nindex b823d75..53f265d 100644\n--- a/README.md\n+++ b/README.md\n@@ -74,7 +74,7 @@ With oclif you can create 2 different CLI types, single and multi.\n \n Single CLIs are like `ls` or `cat`. They can accept arguments and flags. Single CLIs can optionally be just be a single file.\n \n-Multi CLIs are like `git` or `heroku`. They have subcommands that are themselves single CLIs commands. In the `package.json` there is a field `oclif.commands` that points to a directory. This directory contains all the subcommands for the CLI. For example, if you wanted a CLI called `mycli` with the commands `mycli create` and `mycli destroy`, you would have a project like the following:\n+Multi CLIs are like `git` or `heroku`. They have subcommands that are themselves single CLI commands. In the `package.json` there is a field `oclif.commands` that points to a directory. This directory contains all the subcommands for the CLI. For example, if you wanted a CLI called `mycli` with the commands `mycli create` and `mycli destroy`, you would have a project like the following:\n \n ```\n package.json\n", "diff --git a/vsts.yml b/vsts.yml\nindex 6cb0eb3..a058238 100644\n--- a/vsts.yml\n+++ b/vsts.yml\n@@ -86,13 +86,13 @@ jobs:\n killall Electron\n fi\n displayName: Make sure Electron isn't running from previous tests\n-\n- - bash: |\n- cd src\n- python electron/script/verify-ffmpeg.py --source-root \"$PWD\" --build-dir out/Default --ffmpeg-path out/ffmpeg\n- displayName: Verify non proprietary ffmpeg\n- condition: and(succeeded(), eq(variables['RUN_TESTS'], '1'))\n- timeoutInMinutes: 5\n+# FIXME(alexeykuzmin)\n+# - bash: |\n+# cd src\n+# python electron/script/verify-ffmpeg.py --source-root \"$PWD\" --build-dir out/Default --ffmpeg-path out/ffmpeg\n+# displayName: Verify non proprietary ffmpeg\n+# condition: and(succeeded(), eq(variables['RUN_TESTS'], '1'))\n+# timeoutInMinutes: 5\n \n - bash: |\n cd src\n", "diff --git a/scripts/bump-edge.ts b/scripts/bump-edge.ts\nindex e92e3c9..0b7a11a 100644\n--- a/scripts/bump-edge.ts\n+++ b/scripts/bump-edge.ts\n@@ -53,6 +53,7 @@ async function loadWorkspace (dir: string) {\n }\n \n const rename = (from: string, to: string) => {\n+ find(from).data._name = find(from).data.name\n find(from).data.name = to\n for (const pkg of packages) {\n pkg.updateDeps((dep) => {\n"]
4
["a280a52c8309465276c3509848ddcddbe19732b6", "06c12fb603e3a38eca0340a92719ee59d34a9f47", "9187415f5ee35d2e88dd834e413fc16bf19c5db1", "573f87edf9bdc19c9c4c3a978fad6ed3ce788f5f"]
["fix", "docs", "ci", "build"]
abort parallel stages if one failed,fixed tick interval,Port shard precreation service from InfluxDB 1.x Provides new configuration parameters: ``` --storage-shard-precreator-advance-period --storage-shard-precreator-check-interval ``` Closes #19520,use ng2 loadNextToLocation
["diff --git a/Jenkinsfile b/Jenkinsfile\nindex 168f446..a4da961 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -28,6 +28,7 @@ pipeline {\n }\n \n stage('Verify') {\n+ failFast true\n parallel {\n stage('Tests') {\n steps {\n", "diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go\nindex 4a5e764..35c3ff2 100644\n--- a/backend/services/integrations/main.go\n+++ b/backend/services/integrations/main.go\n@@ -54,7 +54,7 @@ func main() {\n \tsigchan := make(chan os.Signal, 1)\n \tsignal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)\n \n-\ttick := time.Tick(intervals.INTEGRATIONS_REQUEST_INTERVAL)\n+\ttick := time.Tick(intervals.INTEGRATIONS_REQUEST_INTERVAL * time.Millisecond)\n \n \tlog.Printf(\"Integration service started\\n\")\n \tmanager.RequestAll()\n@@ -66,7 +66,7 @@ func main() {\n \t\t\tpg.Close()\n \t\t\tos.Exit(0)\n \t\tcase <-tick:\n-\t\t\t// log.Printf(\"Requesting all...\\n\")\n+\t\t\tlog.Printf(\"Requesting all...\\n\")\n \t\t\tmanager.RequestAll()\n \t\tcase event := <-manager.Events:\n \t\t\t// log.Printf(\"New integration event: %v\\n\", *event.RawErrorEvent)\n", "diff --git a/cmd/influxd/launcher/launcher.go b/cmd/influxd/launcher/launcher.go\nindex e3548ef..5559e94 100644\n--- a/cmd/influxd/launcher/launcher.go\n+++ b/cmd/influxd/launcher/launcher.go\n@@ -440,6 +440,16 @@ func launcherOpts(l *Launcher) []cli.Opt {\n \t\t\tFlag: \"storage-retention-check-interval\",\n \t\t\tDesc: \"The interval of time when retention policy enforcement checks run.\",\n \t\t},\n+\t\t{\n+\t\t\tDestP: &l.StorageConfig.PrecreatorConfig.CheckInterval,\n+\t\t\tFlag: \"storage-shard-precreator-check-interval\",\n+\t\t\tDesc: \"The interval of time when the check to pre-create new shards runs.\",\n+\t\t},\n+\t\t{\n+\t\t\tDestP: &l.StorageConfig.PrecreatorConfig.AdvancePeriod,\n+\t\t\tFlag: \"storage-shard-precreator-advance-period\",\n+\t\t\tDesc: \"The default period ahead of the endtime of a shard group that its successor group is created.\",\n+\t\t},\n \n \t\t// InfluxQL Coordinator Config\n \t\t{\ndiff --git a/storage/config.go b/storage/config.go\nindex ef953a2..d8e24db 100644\n--- a/storage/config.go\n+++ b/storage/config.go\n@@ -2,6 +2,7 @@ package storage\n \n import (\n \t\"github.com/influxdata/influxdb/v2/tsdb\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n \t\"github.com/influxdata/influxdb/v2/v1/services/retention\"\n )\n \n@@ -10,6 +11,7 @@ type Config struct {\n \tData tsdb.Config\n \n \tRetentionService retention.Config\n+\tPrecreatorConfig precreator.Config\n }\n \n // NewConfig initialises a new config for an Engine.\n@@ -17,5 +19,6 @@ func NewConfig() Config {\n \treturn Config{\n \t\tData: tsdb.NewConfig(),\n \t\tRetentionService: retention.NewConfig(),\n+\t\tPrecreatorConfig: precreator.NewConfig(),\n \t}\n }\ndiff --git a/storage/engine.go b/storage/engine.go\nindex 8518f48..ae37fdd 100644\n--- a/storage/engine.go\n+++ b/storage/engine.go\n@@ -19,6 +19,7 @@ import (\n \t_ \"github.com/influxdata/influxdb/v2/tsdb/index/tsi1\"\n \t\"github.com/influxdata/influxdb/v2/v1/coordinator\"\n \t\"github.com/influxdata/influxdb/v2/v1/services/meta\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n \t\"github.com/influxdata/influxdb/v2/v1/services/retention\"\n \t\"github.com/influxdata/influxql\"\n \t\"github.com/pkg/errors\"\n@@ -42,7 +43,8 @@ type Engine struct {\n \t\tWritePoints(database, retentionPolicy string, consistencyLevel models.ConsistencyLevel, user meta.User, points []models.Point) error\n \t}\n 
\n-\tretentionService *retention.Service\n+\tretentionService *retention.Service\n+\tprecreatorService *precreator.Service\n \n \tdefaultMetricLabels prometheus.Labels\n \n@@ -66,6 +68,7 @@ type MetaClient interface {\n \tDatabase(name string) (di *meta.DatabaseInfo)\n \tDatabases() []meta.DatabaseInfo\n \tDeleteShardGroup(database, policy string, id uint64) error\n+\tPrecreateShardGroups(now, cutoff time.Time) error\n \tPruneShardGroups() error\n \tRetentionPolicy(database, policy string) (*meta.RetentionPolicyInfo, error)\n \tShardGroupsByTimeRange(database, policy string, min, max time.Time) (a []meta.ShardGroupInfo, err error)\n@@ -115,6 +118,9 @@ func NewEngine(path string, c Config, options ...Option) *Engine {\n \te.retentionService.TSDBStore = e.tsdbStore\n \te.retentionService.MetaClient = e.metaClient\n \n+\te.precreatorService = precreator.NewService(c.PrecreatorConfig)\n+\te.precreatorService.MetaClient = e.metaClient\n+\n \treturn e\n }\n \n@@ -132,6 +138,10 @@ func (e *Engine) WithLogger(log *zap.Logger) {\n \tif e.retentionService != nil {\n \t\te.retentionService.WithLogger(log)\n \t}\n+\n+\tif e.precreatorService != nil {\n+\t\te.precreatorService.WithLogger(log)\n+\t}\n }\n \n // PrometheusCollectors returns all the prometheus collectors associated with\n@@ -161,6 +171,10 @@ func (e *Engine) Open(ctx context.Context) (err error) {\n \t\treturn err\n \t}\n \n+\tif err := e.precreatorService.Open(ctx); err != nil {\n+\t\treturn err\n+\t}\n+\n \te.closing = make(chan struct{})\n \n \treturn nil\n@@ -194,6 +208,10 @@ func (e *Engine) Close() error {\n \n \tvar retErr *multierror.Error\n \n+\tif err := e.precreatorService.Close(); err != nil {\n+\t\tretErr = multierror.Append(retErr, fmt.Errorf(\"error closing shard precreator service: %w\", err))\n+\t}\n+\n \tif err := e.retentionService.Close(); err != nil {\n \t\tretErr = multierror.Append(retErr, fmt.Errorf(\"error closing retention service: %w\", err))\n \t}\ndiff --git a/v1/services/precreator/README.md b/v1/services/precreator/README.md\nnew file mode 100644\nindex 0000000..8830b73\n--- /dev/null\n+++ b/v1/services/precreator/README.md\n@@ -0,0 +1,13 @@\n+Shard Precreation\n+============\n+\n+During normal operation when InfluxDB receives time-series data, it writes the data to files known as _shards_. Each shard only contains data for a specific range of time. Therefore, before data can be accepted by the system, the shards must exist and InfluxDB always checks that the required shards exist for every incoming data point. If the required shards do not exist, InfluxDB will create those shards. Because this requires a cluster to reach consensus, the process is not instantaneous and can temporarily impact write-throughput.\n+\n+Since almost all time-series data is written sequentially in time, the system has an excellent idea of the timestamps of future data. Shard precreation takes advantage of this fact by creating required shards ahead of time, thereby ensuring the required shards exist by the time new time-series data actually arrives. Write-throughput is therefore not affected when data is first received for a range of time that would normally trigger shard creation.\n+\n+Note that the shard-existence check must remain in place in the code, even with shard precreation. This is because while most data is written sequentially in time, this is not always the case. 
Data may be written with timestamps in the past, or farther in the future than shard precreation handles.\n+\n+## Configuration\n+Shard precreation can be disabled if necessary, though this is not recommended. If it is disabled, then shards will be only be created when explicitly needed.\n+\n+The interval between runs of the shard precreation service, as well as the time-in-advance the shards are created, are also configurable. The defaults should work for most deployments.\ndiff --git a/v1/services/precreator/config.go b/v1/services/precreator/config.go\nnew file mode 100644\nindex 0000000..5e994e6\n--- /dev/null\n+++ b/v1/services/precreator/config.go\n@@ -0,0 +1,65 @@\n+package precreator\n+\n+import (\n+\t\"errors\"\n+\t\"time\"\n+\n+\t\"github.com/influxdata/influxdb/v2/toml\"\n+\t\"github.com/influxdata/influxdb/v2/v1/monitor/diagnostics\"\n+)\n+\n+const (\n+\t// DefaultCheckInterval is the shard precreation check time if none is specified.\n+\tDefaultCheckInterval = 10 * time.Minute\n+\n+\t// DefaultAdvancePeriod is the default period ahead of the endtime of a shard group\n+\t// that its successor group is created.\n+\tDefaultAdvancePeriod = 30 * time.Minute\n+)\n+\n+// Config represents the configuration for shard precreation.\n+type Config struct {\n+\tEnabled bool `toml:\"enabled\"`\n+\tCheckInterval toml.Duration `toml:\"check-interval\"`\n+\tAdvancePeriod toml.Duration `toml:\"advance-period\"`\n+}\n+\n+// NewConfig returns a new Config with defaults.\n+func NewConfig() Config {\n+\treturn Config{\n+\t\tEnabled: true,\n+\t\tCheckInterval: toml.Duration(DefaultCheckInterval),\n+\t\tAdvancePeriod: toml.Duration(DefaultAdvancePeriod),\n+\t}\n+}\n+\n+// Validate returns an error if the Config is invalid.\n+func (c Config) Validate() error {\n+\tif !c.Enabled {\n+\t\treturn nil\n+\t}\n+\n+\tif c.CheckInterval <= 0 {\n+\t\treturn errors.New(\"check-interval must be positive\")\n+\t}\n+\tif c.AdvancePeriod <= 0 {\n+\t\treturn errors.New(\"advance-period must be positive\")\n+\t}\n+\n+\treturn nil\n+}\n+\n+// Diagnostics returns a diagnostics representation of a subset of the Config.\n+func (c Config) Diagnostics() (*diagnostics.Diagnostics, error) {\n+\tif !c.Enabled {\n+\t\treturn diagnostics.RowFromMap(map[string]interface{}{\n+\t\t\t\"enabled\": false,\n+\t\t}), nil\n+\t}\n+\n+\treturn diagnostics.RowFromMap(map[string]interface{}{\n+\t\t\"enabled\": true,\n+\t\t\"check-interval\": c.CheckInterval,\n+\t\t\"advance-period\": c.AdvancePeriod,\n+\t}), nil\n+}\ndiff --git a/v1/services/precreator/config_test.go b/v1/services/precreator/config_test.go\nnew file mode 100644\nindex 0000000..2686001\n--- /dev/null\n+++ b/v1/services/precreator/config_test.go\n@@ -0,0 +1,67 @@\n+package precreator_test\n+\n+import (\n+\t\"testing\"\n+\t\"time\"\n+\n+\t\"github.com/BurntSushi/toml\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n+)\n+\n+func TestConfig_Parse(t *testing.T) {\n+\t// Parse configuration.\n+\tvar c precreator.Config\n+\tif _, err := toml.Decode(`\n+enabled = true\n+check-interval = \"2m\"\n+advance-period = \"10m\"\n+`, &c); err != nil {\n+\n+\t\tt.Fatal(err)\n+\t}\n+\n+\t// Validate configuration.\n+\tif !c.Enabled {\n+\t\tt.Fatalf(\"unexpected enabled state: %v\", c.Enabled)\n+\t} else if time.Duration(c.CheckInterval) != 2*time.Minute {\n+\t\tt.Fatalf(\"unexpected check interval: %s\", c.CheckInterval)\n+\t} else if time.Duration(c.AdvancePeriod) != 10*time.Minute {\n+\t\tt.Fatalf(\"unexpected advance period: %s\", c.AdvancePeriod)\n+\t}\n+}\n+\n+func 
TestConfig_Validate(t *testing.T) {\n+\tc := precreator.NewConfig()\n+\tif err := c.Validate(); err != nil {\n+\t\tt.Fatalf(\"unexpected validation fail from NewConfig: %s\", err)\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.CheckInterval = 0\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for check-interval = 0, got nil\")\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.CheckInterval *= -1\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for negative check-interval, got nil\")\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.AdvancePeriod = 0\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for advance-period = 0, got nil\")\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.AdvancePeriod *= -1\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for negative advance-period, got nil\")\n+\t}\n+\n+\tc.Enabled = false\n+\tif err := c.Validate(); err != nil {\n+\t\tt.Fatalf(\"unexpected validation fail from disabled config: %s\", err)\n+\t}\n+}\ndiff --git a/v1/services/precreator/service.go b/v1/services/precreator/service.go\nnew file mode 100644\nindex 0000000..28e8f16\n--- /dev/null\n+++ b/v1/services/precreator/service.go\n@@ -0,0 +1,93 @@\n+// Package precreator provides the shard precreation service.\n+package precreator // import \"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n+\n+import (\n+\t\"context\"\n+\t\"sync\"\n+\t\"time\"\n+\n+\t\"github.com/influxdata/influxdb/v2/logger\"\n+\t\"go.uber.org/zap\"\n+)\n+\n+// Service manages the shard precreation service.\n+type Service struct {\n+\tcheckInterval time.Duration\n+\tadvancePeriod time.Duration\n+\n+\tLogger *zap.Logger\n+\n+\tcancel context.CancelFunc\n+\twg sync.WaitGroup\n+\n+\tMetaClient interface {\n+\t\tPrecreateShardGroups(now, cutoff time.Time) error\n+\t}\n+}\n+\n+// NewService returns an instance of the precreation service.\n+func NewService(c Config) *Service {\n+\treturn &Service{\n+\t\tcheckInterval: time.Duration(c.CheckInterval),\n+\t\tadvancePeriod: time.Duration(c.AdvancePeriod),\n+\t\tLogger: zap.NewNop(),\n+\t}\n+}\n+\n+// WithLogger sets the logger for the service.\n+func (s *Service) WithLogger(log *zap.Logger) {\n+\ts.Logger = log.With(zap.String(\"service\", \"shard-precreation\"))\n+}\n+\n+// Open starts the precreation service.\n+func (s *Service) Open(ctx context.Context) error {\n+\tif s.cancel != nil {\n+\t\treturn nil\n+\t}\n+\n+\ts.Logger.Info(\"Starting precreation service\",\n+\t\tlogger.DurationLiteral(\"check_interval\", s.checkInterval),\n+\t\tlogger.DurationLiteral(\"advance_period\", s.advancePeriod))\n+\n+\tctx, s.cancel = context.WithCancel(ctx)\n+\n+\ts.wg.Add(1)\n+\tgo s.runPrecreation(ctx)\n+\treturn nil\n+}\n+\n+// Close stops the precreation service.\n+func (s *Service) Close() error {\n+\tif s.cancel == nil {\n+\t\treturn nil\n+\t}\n+\n+\ts.cancel()\n+\ts.wg.Wait()\n+\ts.cancel = nil\n+\n+\treturn nil\n+}\n+\n+// runPrecreation continually checks if resources need precreation.\n+func (s *Service) runPrecreation(ctx context.Context) {\n+\tdefer s.wg.Done()\n+\n+\tfor {\n+\t\tselect {\n+\t\tcase <-time.After(s.checkInterval):\n+\t\t\tif err := s.precreate(time.Now().UTC()); err != nil {\n+\t\t\t\ts.Logger.Info(\"Failed to precreate shards\", zap.Error(err))\n+\t\t\t}\n+\t\tcase <-ctx.Done():\n+\t\t\ts.Logger.Info(\"Terminating precreation service\")\n+\t\t\treturn\n+\t\t}\n+\t}\n+}\n+\n+// precreate performs actual resource precreation.\n+func (s *Service) precreate(now time.Time) error 
{\n+\tcutoff := now.Add(s.advancePeriod).UTC()\n+\treturn s.MetaClient.PrecreateShardGroups(now, cutoff)\n+}\ndiff --git a/v1/services/precreator/service_test.go b/v1/services/precreator/service_test.go\nnew file mode 100644\nindex 0000000..20289b7\n--- /dev/null\n+++ b/v1/services/precreator/service_test.go\n@@ -0,0 +1,56 @@\n+package precreator_test\n+\n+import (\n+\t\"context\"\n+\t\"os\"\n+\t\"testing\"\n+\t\"time\"\n+\n+\t\"github.com/influxdata/influxdb/v2/logger\"\n+\t\"github.com/influxdata/influxdb/v2/toml\"\n+\t\"github.com/influxdata/influxdb/v2/v1/internal\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n+)\n+\n+func TestShardPrecreation(t *testing.T) {\n+\tdone := make(chan struct{})\n+\tprecreate := false\n+\n+\tvar mc internal.MetaClientMock\n+\tmc.PrecreateShardGroupsFn = func(now, cutoff time.Time) error {\n+\t\tif !precreate {\n+\t\t\tclose(done)\n+\t\t\tprecreate = true\n+\t\t}\n+\t\treturn nil\n+\t}\n+\n+\ts := NewTestService()\n+\ts.MetaClient = &mc\n+\n+\tif err := s.Open(context.Background()); err != nil {\n+\t\tt.Fatalf(\"unexpected open error: %s\", err)\n+\t}\n+\tdefer s.Close() // double close should not cause a panic\n+\n+\ttimer := time.NewTimer(100 * time.Millisecond)\n+\tselect {\n+\tcase <-done:\n+\t\ttimer.Stop()\n+\tcase <-timer.C:\n+\t\tt.Errorf(\"timeout exceeded while waiting for precreate\")\n+\t}\n+\n+\tif err := s.Close(); err != nil {\n+\t\tt.Fatalf(\"unexpected close error: %s\", err)\n+\t}\n+}\n+\n+func NewTestService() *precreator.Service {\n+\tconfig := precreator.NewConfig()\n+\tconfig.CheckInterval = toml.Duration(10 * time.Millisecond)\n+\n+\ts := precreator.NewService(config)\n+\ts.WithLogger(logger.New(os.Stderr))\n+\treturn s\n+}\n", "diff --git a/ionic/components/nav/nav-controller.ts b/ionic/components/nav/nav-controller.ts\nindex 8e23c4c..37ac0f4 100644\n--- a/ionic/components/nav/nav-controller.ts\n+++ b/ionic/components/nav/nav-controller.ts\n@@ -527,41 +527,13 @@ export class NavController extends Ion {\n * @private\n * TODO\n */\n- createViewComponentRef(type, hostProtoViewRef, viewContainer, viewCtrlBindings) {\n- let bindings = this.bindings.concat(viewCtrlBindings);\n-\n- // the same guts as DynamicComponentLoader.loadNextToLocation\n- var hostViewRef =\n- viewContainer.createHostView(hostProtoViewRef, viewContainer.length, bindings);\n- var newLocation = this._viewManager.getHostElement(hostViewRef);\n- var component = this._viewManager.getComponent(newLocation);\n-\n- var dispose = () => {\n- var index = viewContainer.indexOf(hostViewRef);\n- if (index !== -1) {\n- viewContainer.remove(index);\n- }\n- };\n-\n- // TODO: make-shift ComponentRef_, this is pretty much going to\n- // break in future versions of ng2, keep an eye on it\n- return {\n- location: newLocation,\n- instance: component,\n- dispose: dispose\n- };\n- }\n-\n- /**\n- * @private\n- * TODO\n- */\n- getBindings(viewCtrl) {\n- // create bindings to this ViewController and its NavParams\n- return this.bindings.concat(Injector.resolve([\n+ loadNextToAnchor(type, location, viewCtrl) {\n+ let bindings = this.bindings.concat(Injector.resolve([\n bind(ViewController).toValue(viewCtrl),\n bind(NavParams).toValue(viewCtrl.params),\n ]));\n+\n+ return this._loader.loadNextToLocation(type, location, bindings);\n }\n \n /**\ndiff --git a/ionic/components/nav/nav.ts b/ionic/components/nav/nav.ts\nindex a98a4ef..063eeb9 100644\n--- a/ionic/components/nav/nav.ts\n+++ b/ionic/components/nav/nav.ts\n@@ -192,65 +192,70 @@ export class Nav extends NavController {\n 
if (structure.tabs) {\n // the component being loaded is an <ion-tabs>\n // Tabs is essentially a pane, cuz it has its own navbar and content containers\n- let contentContainerRef = this._viewManager.getViewContainer(this.anchorElementRef());\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, contentContainerRef, this.getBindings(viewCtrl));\n- viewComponentRef.instance._paneView = true;\n+ this.loadNextToAnchor(componentType, this.anchorElementRef(), viewCtrl).then(componentRef => {\n \n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ componentRef.instance._paneView = true;\n+\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n+\n+ viewCtrl.onReady().then(() => {\n+ done();\n+ });\n \n- viewCtrl.onReady().then(() => {\n- done();\n });\n \n } else {\n // normal ion-view going into pane\n this.getPane(structure, viewCtrl, (pane) => {\n // add the content of the view into the pane's content area\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, pane.contentContainerRef, this.getBindings(viewCtrl));\n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n+ this.loadNextToAnchor(componentType, pane.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // remove the pane if there are no view items left\n- pane.totalViews--;\n- if (pane.totalViews === 0) {\n- pane.dispose && pane.dispose();\n- }\n- });\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n \n- // count how many ViewControllers are in this pane\n- pane.totalViews++;\n+ // remove the pane if there are no view items left\n+ pane.totalViews--;\n+ if (pane.totalViews === 0) {\n+ pane.dispose && pane.dispose();\n+ }\n+ });\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ // count how many ViewControllers are in this pane\n+ pane.totalViews++;\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = pane.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = pane.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef && navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- 
if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ done();\n+ });\n \n- done();\n });\n }\n }\n@@ -273,7 +278,7 @@ export class Nav extends NavController {\n \n } else {\n // create a new nav pane\n- this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.getBindings(viewCtrl)).then(componentRef => {\n+ this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.bindings).then(componentRef => {\n \n // get the pane reference\n pane = this.newPane;\n@@ -354,17 +359,6 @@ export class Nav extends NavController {\n \n /**\n * @private\n- * TODO\n- * @param {TODO} elementBinder TODO\n- * @param {TODO} id TODO\n- * @return {TODO} TODO\n- */\n-function isComponent(elementBinder, id) {\n- return (elementBinder && elementBinder.componentDirective && elementBinder.componentDirective.metadata.id == id);\n-}\n-\n-/**\n- * @private\n */\n @Directive({selector: 'template[pane-anchor]'})\n class NavPaneAnchor {\n@@ -393,9 +387,9 @@ class NavBarAnchor {\n class ContentAnchor {\n constructor(\n @Host() @Inject(forwardRef(() => Pane)) pane: Pane,\n- viewContainerRef: ViewContainerRef\n+ elementRef: ElementRef\n ) {\n- pane.contentContainerRef = viewContainerRef;\n+ pane.contentAnchorRef = elementRef;\n }\n }\n \ndiff --git a/ionic/components/tabs/tab.ts b/ionic/components/tabs/tab.ts\nindex aa21cad..af5d190 100644\n--- a/ionic/components/tabs/tab.ts\n+++ b/ionic/components/tabs/tab.ts\n@@ -153,40 +153,44 @@ export class Tab extends NavController {\n \n loadContainer(componentType, hostProtoViewRef, viewCtrl, done) {\n \n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, this.contentContainerRef, this.getBindings(viewCtrl));\n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ this.loadNextToAnchor(componentType, this.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = this.tabs.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = 
this.tabs.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef && navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ done();\n+\n+ });\n \n- done();\n }\n \n }\n@@ -194,10 +198,7 @@ export class Tab extends NavController {\n \n @Directive({selector: 'template[content-anchor]'})\n class TabContentAnchor {\n- constructor(\n- @Host() tab: Tab,\n- viewContainerRef: ViewContainerRef\n- ) {\n- tab.contentContainerRef = viewContainerRef;\n+ constructor(@Host() tab: Tab, elementRef: ElementRef) {\n+ tab.contentAnchorRef = elementRef;\n }\n }\n"]
4
["28e623b294816c4e070971782a75c8697a11966f", "7dc3b70fe40fc7de255a28bb3098bcb8c0d35365", "6f0cf049caa1a7982669ee685e86621452686551", "085ee958c48d695ba50822d8767d615fd9e887fa"]
["ci", "fix", "feat", "refactor"]
lint README,Remove hasmany and belongsto from context menu Signed-off-by: Pranav C <[email protected]>,update CI images from docker buster to bullseye This will break `perf_image` until the new CI image is built due to the newly required `--all-tags` parameter to `docker push` that isn't available for the docker version we run on buster.,correct width when --no-quotes is used
["diff --git a/README.md b/README.md\nindex a163c83..9cd12bc 100644\n--- a/README.md\n+++ b/README.md\n@@ -26,7 +26,7 @@ Ibis has three primary components:\n Ibis aims to be a future-proof solution to interacting with data using Python and can accomplish this goal through its main features:\n \n - **Familiar API**: Ibis\u2019s API design borrows from popular APIs like pandas and dplyr that most users already know and like to use.\n-- **Consistent syntax**: Ibis aims to be universal Python API for tabular data, big or small.\n+- **Consistent syntax**: Ibis aims to be universal Python API for tabular data, big or small.\n - **Deferred execution**: Ibis pushes code execution to the query engine and only moves required data into memory when it has to.\n This leads to more faster, more efficient analytics workflows\n - **Interactive mode**: Ibis also provides an interactive mode, in which users can quickly diagnose problems, do exploratory data analysis, and mock up workflows locally.\n", "diff --git a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\nindex 5bc6f67..aaa297c 100644\n--- a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n@@ -261,37 +261,7 @@\n :size=\"size\"\n @input=\"loadTableData\"\n />\n- <!-- <v-pagination\n- v-if=\"count !== Infinity\"\n- style=\"max-width: 100%\"\n- v-model=\"page\"\n- :length=\"Math.ceil(count / size)\"\n- :total-visible=\"8\"\n- @input=\"loadTableData\"\n- color=\"primary lighten-2\"\n- ></v-pagination>\n- <div v-else class=\"mx-auto d-flex align-center mt-n1 \" style=\"max-width:250px\">\n- <span class=\"caption\" style=\"white-space: nowrap\"> Change page:</span>\n- <v-text-field\n- class=\"ml-1 caption\"\n- :full-width=\"false\"\n- outlined\n- dense\n- hide-details\n- v-model=\"page\"\n- @keydown.enter=\"loadTableData\"\n- type=\"number\"\n- >\n- <template #append>\n- <x-icon tooltip=\"Change page\" small icon.class=\"mt-1\" @click=\"loadTableData\">mdi-keyboard-return\n- </x-icon>\n- </template>\n- </v-text-field>\n- </div>-->\n </template>\n- <!-- <div v-else class=\"d-flex justify-center py-4\">-->\n- <!-- <v-alert type=\"info\" dense class=\"ma-1 flex-shrink-1\">Table is empty</v-alert>-->\n- <!-- </div>-->\n </div>\n \n <spreadsheet-nav-drawer\n@@ -414,9 +384,9 @@\n <span class=\"caption\">Delete Selected Rows</span>\n </v-list-item>\n </template>\n- <template v-if=\"meta.hasMany && meta.hasMany.length\">\n+ <!-- <template v-if=\"meta.hasMany && meta.hasMany.length\">\n <v-divider v-if=\"isEditable && !isLocked\" />\n- <span class=\"ml-3 grey--text \" style=\"font-size: 9px\">Has Many</span>\n+ <span class=\"ml-3 grey&#45;&#45;text \" style=\"font-size: 9px\">Has Many</span>\n \n <v-list-item v-for=\"(hm,i) in meta.hasMany\" :key=\"i\" @click=\"addNewRelationTabCtxMenu(hm,'hm')\">\n <span class=\"caption text-capitalize\">{{ hm._tn }}</span>\n@@ -425,12 +395,12 @@\n \n <template v-if=\"meta.belongsTo && meta.belongsTo.length\">\n <v-divider />\n- <span class=\"ml-3 grey--text \" style=\"font-size: 9px\">Belongs To</span>\n+ <span class=\"ml-3 grey&#45;&#45;text \" style=\"font-size: 9px\">Belongs To</span>\n \n <v-list-item v-for=\"(bt,i) in belongsTo\" :key=\"i\" @click=\"addNewRelationTabCtxMenu(bt,'bt')\">\n <span class=\"caption text-capitalize\">{{ bt._rtn }}</span>\n </v-list-item>\n- </template>\n+ </template>-->\n </v-list>\n </v-menu>\n <v-dialog\n", "diff 
--git a/.circleci/config.yml b/.circleci/config.yml\nindex f8a53ba..c378c7e 100644\n--- a/.circleci/config.yml\n+++ b/.circleci/config.yml\n@@ -336,7 +336,7 @@ jobs:\n # Disabling for now, and tracked further investigations\n # in https://github.com/influxdata/k8s-idpe/issues/3038\n docker_layer_caching: false\n- version: 19.03.14\n+ version: 20.10.7\n - run: |\n sudo apt-get update\n sudo apt-get install -y docker.io\n@@ -355,7 +355,7 @@ jobs:\n BRANCH=$(git rev-parse --abbrev-ref HEAD | tr '/' '.')\n COMMIT_SHA=$(git rev-parse --short HEAD)\n docker build -t quay.io/influxdb/iox:$COMMIT_SHA -t quay.io/influxdb/iox:main -f docker/Dockerfile.iox .\n- docker push quay.io/influxdb/iox\n+ docker push --all-tags quay.io/influxdb/iox\n echo \"export COMMIT_SHA=${COMMIT_SHA}\" >> $BASH_ENV\n - run:\n name: Deploy tags\ndiff --git a/Dockerfile b/Dockerfile\nindex 8c23ea2..1df1fd2 100644\n--- a/Dockerfile\n+++ b/Dockerfile\n@@ -17,7 +17,7 @@ RUN \\\n cp /influxdb_iox/target/release/influxdb_iox /root/influxdb_iox && \\\n du -cshx /usr/local/cargo/registry /usr/local/cargo/git /influxdb_iox/target\n \n-FROM debian:buster-slim\n+FROM debian:bullseye-slim\n \n RUN apt-get update \\\n && apt-get install -y libssl1.1 libgcc1 libc6 ca-certificates --no-install-recommends \\\ndiff --git a/docker/Dockerfile.ci b/docker/Dockerfile.ci\nindex db0a8ca..cf9cd15 100644\n--- a/docker/Dockerfile.ci\n+++ b/docker/Dockerfile.ci\n@@ -12,7 +12,7 @@\n \n ARG RUST_VERSION\n # Build actual image used for CI pipeline\n-FROM rust:${RUST_VERSION}-slim-buster\n+FROM rust:${RUST_VERSION}-slim-bullseye\n \n # When https://github.com/rust-lang/rustup/issues/2686 is fixed, run the command added that\n # will install everything in rust-toolchain.toml here so that components are in the container\n@@ -42,7 +42,7 @@ COPY docker/redpanda.gpg /tmp/redpanda.gpg\n # Generated from https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/cfg/setup/bash.deb.sh\n RUN apt-key add /tmp/redpanda.gpg \\\n && rm /tmp/redpanda.gpg \\\n- && curl ${CURL_FLAGS} \"https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/config.deb.txt?distro=debian&codename=buster&version=10&arch=x86_64\" \\\n+ && curl ${CURL_FLAGS} \"https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/config.deb.txt?distro=debian&codename=bullseye&version=10&arch=x86_64\" \\\n > /etc/apt/sources.list.d/vectorized-redpanda.list \\\n && apt-get update \\\n && apt-get install -y redpanda \\\ndiff --git a/docker/Dockerfile.iox b/docker/Dockerfile.iox\nindex 42414db..ae1f38e 100644\n--- a/docker/Dockerfile.iox\n+++ b/docker/Dockerfile.iox\n@@ -1,7 +1,7 @@\n ###\n # Dockerfile used for deploying IOx\n ##\n-FROM debian:buster-slim\n+FROM debian:bullseye-slim\n \n RUN apt-get update \\\n && apt-get install -y libssl1.1 libgcc1 libc6 ca-certificates gettext-base --no-install-recommends \\\n", "diff --git a/src/output/grid.rs b/src/output/grid.rs\nindex 37f6c57..ce989e5 100644\n--- a/src/output/grid.rs\n+++ b/src/output/grid.rs\n@@ -8,6 +8,8 @@ use crate::output::file_name::{Classify, Options as FileStyle};\n use crate::output::file_name::{EmbedHyperlinks, ShowIcons};\n use crate::theme::Theme;\n \n+use super::file_name::QuoteStyle;\n+\n #[derive(PartialEq, Eq, Debug, Copy, Clone)]\n pub struct Options {\n pub across: bool,\n@@ -55,27 +57,34 @@ impl<'a> Render<'a> {\n } else {\n 0\n };\n-\n- let space_filename_offset = if file.name.contains(' ') || file.name.contains('\\'') {\n- 2\n- } else {\n- 0\n+ let space_filename_offset = match self.file_style.quote_style {\n+ 
QuoteStyle::QuoteSpaces if file.name.contains(' ') => 2,\n+ QuoteStyle::NoQuotes => 0,\n+ _ => 0, // Default case\n };\n-\n let contents = filename.paint();\n- #[rustfmt::skip]\n let width = match (\n filename.options.embed_hyperlinks,\n filename.options.show_icons,\n ) {\n- ( EmbedHyperlinks::On, ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing) )\n- => filename.bare_width() + classification_width + 1 + (spacing as usize) + space_filename_offset,\n- ( EmbedHyperlinks::On, ShowIcons::Never )\n- => filename.bare_width() + classification_width + space_filename_offset,\n- ( EmbedHyperlinks::Off, ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing) )\n- => filename.bare_width() + 1 + (spacing as usize) + space_filename_offset,\n- ( EmbedHyperlinks::Off, _ )\n- => *contents.width(),\n+ (\n+ EmbedHyperlinks::On,\n+ ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing),\n+ ) => {\n+ filename.bare_width()\n+ + classification_width\n+ + 1\n+ + (spacing as usize)\n+ + space_filename_offset\n+ }\n+ (EmbedHyperlinks::On, ShowIcons::Never) => {\n+ filename.bare_width() + classification_width + space_filename_offset\n+ }\n+ (\n+ EmbedHyperlinks::Off,\n+ ShowIcons::Always(spacing) | ShowIcons::Automatic(spacing),\n+ ) => filename.bare_width() + 1 + (spacing as usize) + space_filename_offset,\n+ (EmbedHyperlinks::Off, _) => *contents.width(),\n };\n \n grid.add(tg::Cell {\n"]
4
["cab2cf4d3fffb0ec2b56d455e67ac5fa992b4104", "7dbbb64c45506ef634180638db800b6d9535523d", "640cd88df3069a97d8244398414338dd317c5470", "61eaa2d0cca9bd27d6c5f0a8f9b34200b77fdbb0"]
["docs", "refactor", "ci", "fix"]
use ng2 loadNextToLocation,fix "types" field in dist,use an action for issue assignment,fix a few issues
["diff --git a/ionic/components/nav/nav-controller.ts b/ionic/components/nav/nav-controller.ts\nindex 8e23c4c..37ac0f4 100644\n--- a/ionic/components/nav/nav-controller.ts\n+++ b/ionic/components/nav/nav-controller.ts\n@@ -527,41 +527,13 @@ export class NavController extends Ion {\n * @private\n * TODO\n */\n- createViewComponentRef(type, hostProtoViewRef, viewContainer, viewCtrlBindings) {\n- let bindings = this.bindings.concat(viewCtrlBindings);\n-\n- // the same guts as DynamicComponentLoader.loadNextToLocation\n- var hostViewRef =\n- viewContainer.createHostView(hostProtoViewRef, viewContainer.length, bindings);\n- var newLocation = this._viewManager.getHostElement(hostViewRef);\n- var component = this._viewManager.getComponent(newLocation);\n-\n- var dispose = () => {\n- var index = viewContainer.indexOf(hostViewRef);\n- if (index !== -1) {\n- viewContainer.remove(index);\n- }\n- };\n-\n- // TODO: make-shift ComponentRef_, this is pretty much going to\n- // break in future versions of ng2, keep an eye on it\n- return {\n- location: newLocation,\n- instance: component,\n- dispose: dispose\n- };\n- }\n-\n- /**\n- * @private\n- * TODO\n- */\n- getBindings(viewCtrl) {\n- // create bindings to this ViewController and its NavParams\n- return this.bindings.concat(Injector.resolve([\n+ loadNextToAnchor(type, location, viewCtrl) {\n+ let bindings = this.bindings.concat(Injector.resolve([\n bind(ViewController).toValue(viewCtrl),\n bind(NavParams).toValue(viewCtrl.params),\n ]));\n+\n+ return this._loader.loadNextToLocation(type, location, bindings);\n }\n \n /**\ndiff --git a/ionic/components/nav/nav.ts b/ionic/components/nav/nav.ts\nindex a98a4ef..063eeb9 100644\n--- a/ionic/components/nav/nav.ts\n+++ b/ionic/components/nav/nav.ts\n@@ -192,65 +192,70 @@ export class Nav extends NavController {\n if (structure.tabs) {\n // the component being loaded is an <ion-tabs>\n // Tabs is essentially a pane, cuz it has its own navbar and content containers\n- let contentContainerRef = this._viewManager.getViewContainer(this.anchorElementRef());\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, contentContainerRef, this.getBindings(viewCtrl));\n- viewComponentRef.instance._paneView = true;\n+ this.loadNextToAnchor(componentType, this.anchorElementRef(), viewCtrl).then(componentRef => {\n \n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ componentRef.instance._paneView = true;\n+\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n+\n+ viewCtrl.onReady().then(() => {\n+ done();\n+ });\n \n- viewCtrl.onReady().then(() => {\n- done();\n });\n \n } else {\n // normal ion-view going into pane\n this.getPane(structure, viewCtrl, (pane) => {\n // add the content of the view into the pane's content area\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, pane.contentContainerRef, this.getBindings(viewCtrl));\n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n+ this.loadNextToAnchor(componentType, pane.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // remove the pane if there are no view items left\n- pane.totalViews--;\n- if (pane.totalViews === 0) {\n- pane.dispose && pane.dispose();\n- }\n- });\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n \n- // count how many ViewControllers are in this pane\n- pane.totalViews++;\n+ // remove the pane if there are no view items left\n+ pane.totalViews--;\n+ if (pane.totalViews === 0) {\n+ pane.dispose && 
pane.dispose();\n+ }\n+ });\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ // count how many ViewControllers are in this pane\n+ pane.totalViews++;\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = pane.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = pane.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef && navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ done();\n+ });\n \n- done();\n });\n }\n }\n@@ -273,7 +278,7 @@ export class Nav extends NavController {\n \n } else {\n // create a new nav pane\n- this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.getBindings(viewCtrl)).then(componentRef => {\n+ this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.bindings).then(componentRef => {\n \n // get the pane reference\n pane = this.newPane;\n@@ -354,17 +359,6 @@ export class Nav extends NavController {\n \n /**\n * @private\n- * TODO\n- * @param {TODO} elementBinder TODO\n- * @param {TODO} id TODO\n- * @return {TODO} TODO\n- */\n-function isComponent(elementBinder, id) {\n- return (elementBinder && elementBinder.componentDirective && elementBinder.componentDirective.metadata.id == id);\n-}\n-\n-/**\n- * @private\n */\n @Directive({selector: 'template[pane-anchor]'})\n class NavPaneAnchor {\n@@ -393,9 +387,9 @@ class NavBarAnchor {\n class ContentAnchor {\n constructor(\n @Host() @Inject(forwardRef(() => Pane)) pane: Pane,\n- viewContainerRef: ViewContainerRef\n+ elementRef: ElementRef\n ) {\n- pane.contentContainerRef = viewContainerRef;\n+ pane.contentAnchorRef = elementRef;\n }\n }\n \ndiff 
--git a/ionic/components/tabs/tab.ts b/ionic/components/tabs/tab.ts\nindex aa21cad..af5d190 100644\n--- a/ionic/components/tabs/tab.ts\n+++ b/ionic/components/tabs/tab.ts\n@@ -153,40 +153,44 @@ export class Tab extends NavController {\n \n loadContainer(componentType, hostProtoViewRef, viewCtrl, done) {\n \n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, this.contentContainerRef, this.getBindings(viewCtrl));\n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ this.loadNextToAnchor(componentType, this.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = this.tabs.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = this.tabs.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef && navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ done();\n+\n+ });\n \n- done();\n }\n \n }\n@@ -194,10 +198,7 @@ export class Tab extends NavController {\n \n @Directive({selector: 'template[content-anchor]'})\n class TabContentAnchor {\n- constructor(\n- @Host() tab: Tab,\n- viewContainerRef: ViewContainerRef\n- ) {\n- tab.contentContainerRef = viewContainerRef;\n+ constructor(@Host() tab: Tab, elementRef: ElementRef) {\n+ tab.contentAnchorRef = elementRef;\n }\n }\n", "diff --git a/scripts/prepare.js b/scripts/prepare.js\nindex 4bab09b..55f459b 100644\n--- a/scripts/prepare.js\n+++ b/scripts/prepare.js\n@@ -96,7 +96,6 @@ async 
function prepare() {\n delete json.private\n delete json.scripts\n delete json.devDependencies\n- delete json.types\n \n // Add \"postinstall\" script for donations.\n if (/(native|core)$/.test(name))\n@@ -128,6 +127,7 @@ async function prepare() {\n else {\n json.main = json.main.replace(/^dist\\//, '')\n if (json.main.endsWith('.cjs.js')) {\n+ json.types = json.main.replace('.cjs.js', '.d.ts')\n json.module = json.main.replace('.cjs', '')\n }\n }\n", "diff --git a/.github/workflows/assign.yml b/.github/workflows/assign.yml\nindex 29d92a8..758874e 100644\n--- a/.github/workflows/assign.yml\n+++ b/.github/workflows/assign.yml\n@@ -8,8 +8,6 @@ jobs:\n runs-on: ubuntu-latest\n if: ${{ github.event.comment.body == '/take' }}\n steps:\n- - uses: actions/checkout@v2\n- - name: Assign issue ${{ github.event.issue.number }} to ${{ github.event.comment.user.login }}\n- run: gh issue edit ${{ github.event.issue.number }} --add-assignee \"${{ github.event.comment.user.login }}\"\n- env:\n- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n+ - uses: pozil/[email protected]\n+ with:\n+ assignees: ${{ github.event.comment.user.login }}\n", "diff --git a/README.md b/README.md\nindex d944d22..5099f03 100644\n--- a/README.md\n+++ b/README.md\n@@ -10,9 +10,8 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n \n <!-- toc -->\n \n-* [Motivation](#motivation)\n+* [Introduction](#introduction)\n * [Installation](#installation)\n- + [Setting up a quick project](#setting-up-a-quick-project)\n * [Usage](#usage)\n + [Creating stores](#creating-stores)\n + [Creating reactive views](#creating-reactive-views)\n@@ -35,12 +34,14 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n Easy State consists of two wrapper functions only. `store` creates state stores and `view` creates reactive components, which re-render whenever state stores are mutated. The rest is just plain JavaScript.\n \n ```js\n-import React, from 'react'\n+import React from 'react'\n import { store, view } from 'react-easy-state'\n \n+// stores are normal objects\n const clock = store({ time: new Date() })\n setInterval(() => clock.time = new Date(), 1000)\n \n+// reactive components re-render on store mutations\n function ClockComp () {\n return <div>{clock.time}</div>\n }\n"]
4
["085ee958c48d695ba50822d8767d615fd9e887fa", "f14ef3809f456aadd73523e47cb16c5d15e9a9df", "fb3a231b29bc8bff9270b99dd4aff9dad599f21f", "b8a664c1b10f4e30a3e221a14211a3cdaf90b7f4"]
["refactor", "build", "ci", "docs"]
licensing,extract lambdas,Template using kube api version Signed-off-by: rjshrjndrn <[email protected]>,right side menus
["diff --git a/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java b/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\nindex a4aee6b..bb523fa 100644\n--- a/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\n+++ b/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\n@@ -1,3 +1,18 @@\n+/*\n+ * Copyright \u00a9 2020 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n package io.atomix.cluster.messaging.impl;\n \n import static org.assertj.core.api.Assertions.assertThat;\n", "diff --git a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\nindex 6ee5797..bcfcc72 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n@@ -224,7 +224,6 @@ public final class AsyncSnapshotDirector extends Actor\n private void takeSnapshot() {\n final var transientSnapshotFuture =\n stateController.takeTransientSnapshot(lowerBoundSnapshotPosition);\n-\n transientSnapshotFuture.onComplete(\n (optionalTransientSnapshot, snapshotTakenError) -> {\n if (snapshotTakenError != null) {\n@@ -237,27 +236,31 @@ public final class AsyncSnapshotDirector extends Actor\n takingSnapshot = false;\n return;\n }\n- pendingSnapshot = optionalTransientSnapshot.get();\n- onRecovered();\n-\n- final ActorFuture<Long> lastWrittenPosition =\n- streamProcessor.getLastWrittenPositionAsync();\n- actor.runOnCompletion(\n- lastWrittenPosition,\n- (endPosition, error) -> {\n- if (error == null) {\n- LOG.info(LOG_MSG_WAIT_UNTIL_COMMITTED, endPosition, commitPosition);\n- lastWrittenEventPosition = endPosition;\n- persistingSnapshot = false;\n- persistSnapshotIfLastWrittenPositionCommitted();\n- } else {\n- resetStateOnFailure();\n- LOG.error(ERROR_MSG_ON_RESOLVE_WRITTEN_POS, error);\n- }\n- });\n+ onTransientSnapshotTaken(optionalTransientSnapshot.get());\n });\n }\n \n+ private void onTransientSnapshotTaken(final TransientSnapshot transientSnapshot) {\n+\n+ pendingSnapshot = transientSnapshot;\n+ onRecovered();\n+\n+ final ActorFuture<Long> lastWrittenPosition = streamProcessor.getLastWrittenPositionAsync();\n+ actor.runOnCompletion(lastWrittenPosition, this::onLastWrittenPositionReceived);\n+ }\n+\n+ private void onLastWrittenPositionReceived(final Long endPosition, final Throwable error) {\n+ if (error == null) {\n+ LOG.info(LOG_MSG_WAIT_UNTIL_COMMITTED, endPosition, commitPosition);\n+ lastWrittenEventPosition = endPosition;\n+ persistingSnapshot = false;\n+ persistSnapshotIfLastWrittenPositionCommitted();\n+ } else {\n+ resetStateOnFailure();\n+ LOG.error(ERROR_MSG_ON_RESOLVE_WRITTEN_POS, error);\n+ }\n+ 
}\n+\n private void onRecovered() {\n if (healthStatus != HealthStatus.HEALTHY) {\n healthStatus = HealthStatus.HEALTHY;\n", "diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml\nindex c014f34..2a12e0d 100644\n--- a/.github/workflows/api-ee.yaml\n+++ b/.github/workflows/api-ee.yaml\n@@ -8,7 +8,7 @@ on:\n default: 'false'\n push:\n branches:\n- - dev\n+ - test_ci\n paths:\n - ee/api/**\n - api/**\n@@ -112,7 +112,8 @@ jobs:\n # Deploy command\n kubectl config set-context --namespace=app --current\n kubectl config get-contexts\n- helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -f -\n+ k_version=$(kubectl version --short 2>/dev/null | awk '/Server/{print $NF}')\n+ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -\n env:\n DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}\n # We're not passing -ee flag, because helm will add that.\n", "diff --git a/ionic/components/menu/menu-types.scss b/ionic/components/menu/menu-types.scss\nindex dbbfdda..5e4f990 100644\n--- a/ionic/components/menu/menu-types.scss\n+++ b/ionic/components/menu/menu-types.scss\n@@ -35,3 +35,7 @@ ion-menu[type=overlay] {\n }\n }\n }\n+\n+ion-menu[type=overlay][side=right] {\n+ left: 8px;\n+}\ndiff --git a/ionic/components/menu/menu-types.ts b/ionic/components/menu/menu-types.ts\nindex 360aeb4..0666a38 100644\n--- a/ionic/components/menu/menu-types.ts\n+++ b/ionic/components/menu/menu-types.ts\n@@ -10,7 +10,7 @@ import {Animation} from 'ionic/animations/animation';\n */\n export class MenuType {\n \n- constructor(menu: Menu) {\n+ constructor() {\n this.open = new Animation();\n this.close = new Animation();\n }\n@@ -88,16 +88,17 @@ class MenuRevealType extends MenuType {\n let duration = 250;\n \n let openedX = (menu.width() * (menu.side == 'right' ? -1 : 1)) + 'px';\n+ let closedX = '0px'\n \n this.open.easing(easing).duration(duration);\n this.close.easing(easing).duration(duration);\n \n let contentOpen = new Animation(menu.getContentElement());\n- contentOpen.fromTo(TRANSLATE_X, CENTER, openedX);\n+ contentOpen.fromTo(TRANSLATE_X, closedX, openedX);\n this.open.add(contentOpen);\n \n let contentClose = new Animation(menu.getContentElement());\n- contentClose.fromTo(TRANSLATE_X, openedX, CENTER);\n+ contentClose.fromTo(TRANSLATE_X, openedX, closedX);\n this.close.add(contentClose);\n }\n }\n@@ -117,13 +118,23 @@ class MenuOverlayType extends MenuType {\n let duration = 250;\n let backdropOpacity = 0.5;\n \n- let closedX = (menu.width() * (menu.side == 'right' ? 
1 : -1)) + 'px';\n+ let closedX, openedX;\n+ if (menu.side == 'right') {\n+ // right side\n+ closedX = menu.platform.width() + 'px';\n+ openedX = (menu.platform.width() - menu.width() - 8) + 'px';\n+\n+ } else {\n+ // left side\n+ closedX = -menu.width() + 'px';\n+ openedX = '8px';\n+ }\n \n this.open.easing(easing).duration(duration);\n this.close.easing(easing).duration(duration);\n \n let menuOpen = new Animation(menu.getMenuElement());\n- menuOpen.fromTo(TRANSLATE_X, closedX, '8px');\n+ menuOpen.fromTo(TRANSLATE_X, closedX, openedX);\n this.open.add(menuOpen);\n \n let backdropOpen = new Animation(menu.getBackdropElement());\n@@ -131,7 +142,7 @@ class MenuOverlayType extends MenuType {\n this.open.add(backdropOpen);\n \n let menuClose = new Animation(menu.getMenuElement());\n- menuClose.fromTo(TRANSLATE_X, '8px', closedX);\n+ menuClose.fromTo(TRANSLATE_X, openedX, closedX);\n this.close.add(menuClose);\n \n let backdropClose = new Animation(menu.getBackdropElement());\n@@ -144,4 +155,3 @@ Menu.register('overlay', MenuOverlayType);\n \n const OPACITY = 'opacity';\n const TRANSLATE_X = 'translateX';\n-const CENTER = '0px';\ndiff --git a/ionic/components/menu/menu.ts b/ionic/components/menu/menu.ts\nindex d911b2e..e980069 100644\n--- a/ionic/components/menu/menu.ts\n+++ b/ionic/components/menu/menu.ts\n@@ -4,6 +4,7 @@ import {Ion} from '../ion';\n import {IonicApp} from '../app/app';\n import {IonicConfig} from '../../config/config';\n import {IonicComponent} from '../../config/annotations';\n+import {IonicPlatform} from '../../platform/platform';\n import * as gestures from './menu-gestures';\n \n \n@@ -35,10 +36,16 @@ import * as gestures from './menu-gestures';\n })\n export class Menu extends Ion {\n \n- constructor(app: IonicApp, elementRef: ElementRef, config: IonicConfig) {\n+ constructor(\n+ app: IonicApp,\n+ elementRef: ElementRef,\n+ config: IonicConfig,\n+ platform: IonicPlatform\n+ ) {\n super(elementRef, config);\n-\n this.app = app;\n+ this.platform = platform;\n+\n this.opening = new EventEmitter('opening');\n this.isOpen = false;\n this._disableTime = 0;\n@@ -46,9 +53,9 @@ export class Menu extends Ion {\n \n onInit() {\n super.onInit();\n- this.contentElement = (this.content instanceof Node) ? this.content : this.content.getNativeElement();\n+ this._cntEle = (this.content instanceof Node) ? this.content : this.content.getNativeElement();\n \n- if (!this.contentElement) {\n+ if (!this._cntEle) {\n return console.error('Menu: must have a [content] element to listen for drag events on. Example:\\n\\n<ion-menu [content]=\"content\"></ion-menu>\\n\\n<ion-content #content></ion-content>');\n }\n \n@@ -61,8 +68,8 @@ export class Menu extends Ion {\n this._initGesture();\n this._initType(this.type);\n \n- this.contentElement.classList.add('menu-content');\n- this.contentElement.classList.add('menu-content-' + this.type);\n+ this._cntEle.classList.add('menu-content');\n+ this._cntEle.classList.add('menu-content-' + this.type);\n \n let self = this;\n this.onContentClick = function(ev) {\n@@ -161,11 +168,11 @@ export class Menu extends Ion {\n \n this.isOpen = isOpen;\n \n- this.contentElement.classList[isOpen ? 'add' : 'remove']('menu-content-open');\n+ this._cntEle.classList[isOpen ? 
'add' : 'remove']('menu-content-open');\n \n- this.contentElement.removeEventListener('click', this.onContentClick);\n+ this._cntEle.removeEventListener('click', this.onContentClick);\n if (isOpen) {\n- this.contentElement.addEventListener('click', this.onContentClick);\n+ this._cntEle.addEventListener('click', this.onContentClick);\n \n } else {\n this.getNativeElement().classList.remove('show-menu');\n@@ -220,7 +227,7 @@ export class Menu extends Ion {\n * @return {Element} The Menu's associated content element.\n */\n getContentElement() {\n- return this.contentElement;\n+ return this._cntEle;\n }\n \n /**\n@@ -239,7 +246,7 @@ export class Menu extends Ion {\n this.app.unregister(this.id);\n this._gesture && this._gesture.destroy();\n this._type && this._type.onDestroy();\n- this.contentElement = null;\n+ this._cntEle = null;\n }\n \n }\ndiff --git a/ionic/components/menu/test/basic/index.ts b/ionic/components/menu/test/basic/index.ts\nindex 698cec4..65952ff 100644\n--- a/ionic/components/menu/test/basic/index.ts\n+++ b/ionic/components/menu/test/basic/index.ts\n@@ -36,9 +36,9 @@ class E2EApp {\n ];\n }\n \n- openPage(menu, page) {\n+ openPage(page) {\n // close the menu when clicking a link from the menu\n- menu.close();\n+ this.app.getComponent('leftMenu').close();\n \n // Reset the content nav to have just this page\n // we wouldn't want the back button to show in this scenario\ndiff --git a/ionic/components/menu/test/basic/main.html b/ionic/components/menu/test/basic/main.html\nindex 9bdeb5c..4905ae6 100644\n--- a/ionic/components/menu/test/basic/main.html\n+++ b/ionic/components/menu/test/basic/main.html\n@@ -1,4 +1,4 @@\n-<ion-menu #menu [content]=\"content\">\n+<ion-menu [content]=\"content\" id=\"leftMenu\">\n \n <ion-toolbar secondary>\n <ion-title>Left Menu</ion-title>\n@@ -8,11 +8,35 @@\n \n <ion-list>\n \n- <button ion-item *ng-for=\"#p of pages\" (click)=\"openPage(menu, p)\">\n+ <button ion-item *ng-for=\"#p of pages\" (click)=\"openPage(p)\">\n {{p.title}}\n </button>\n \n- <button ion-item menu-toggle no-forward-icon class=\"e2eCloseMenu\">\n+ <button ion-item menu-toggle=\"leftMenu\" no-forward-icon class=\"e2eCloseMenu\">\n+ Close Menu\n+ </button>\n+\n+ </ion-list>\n+ </ion-content>\n+\n+</ion-menu>\n+\n+\n+<ion-menu side=\"right\" [content]=\"content\" id=\"rightMenu\">\n+\n+ <ion-toolbar secondary>\n+ <ion-title>Right Menu</ion-title>\n+ </ion-toolbar>\n+\n+ <ion-content>\n+\n+ <ion-list>\n+\n+ <button ion-item *ng-for=\"#p of pages\" (click)=\"openPage(p)\">\n+ {{p.title}}\n+ </button>\n+\n+ <button ion-item menu-toggle=\"rightMenu\" no-forward-icon class=\"e2eCloseMenu\">\n Close Menu\n </button>\n \ndiff --git a/ionic/components/menu/test/basic/page1.html b/ionic/components/menu/test/basic/page1.html\nindex 1881d9e..2bc5c79 100644\n--- a/ionic/components/menu/test/basic/page1.html\n+++ b/ionic/components/menu/test/basic/page1.html\n@@ -1,7 +1,7 @@\n \n <ion-navbar *navbar>\n \n- <a menu-toggle>\n+ <a menu-toggle=\"leftMenu\">\n <icon menu></icon>\n </a>\n \n@@ -21,19 +21,23 @@\n </button>\n </ion-nav-items>\n \n- <a menu-toggle secondary>\n+ <a menu-toggle=\"rightMenu\" secondary>\n <icon menu></icon>\n </a>\n \n </ion-navbar>\n \n \n-<ion-content #content padding>\n+<ion-content padding>\n \n <h3>Page 1</h3>\n \n <p>\n- <button class=\"e2eContentToggleMenu\" menu-toggle>Toggle Menu</button>\n+ <button class=\"e2eContentToggleMenu\" menu-toggle=\"leftMenu\">Toggle Left Menu</button>\n+ </p>\n+\n+ <p>\n+ <button class=\"e2eContentToggleMenu\" 
menu-toggle=\"rightMenu\">Toggle Right Menu</button>\n </p>\n \n <f></f><f></f><f></f><f></f><f></f><f></f><f></f><f></f>\ndiff --git a/ionic/components/menu/test/basic/page2.html b/ionic/components/menu/test/basic/page2.html\nindex 9801c4f..098f3e1 100644\n--- a/ionic/components/menu/test/basic/page2.html\n+++ b/ionic/components/menu/test/basic/page2.html\n@@ -1,7 +1,7 @@\n \n <ion-navbar *navbar>\n \n- <a menu-toggle>\n+ <a menu-toggle=\"leftMenu\">\n <icon menu></icon>\n </a>\n \n@@ -11,12 +11,12 @@\n \n </ion-navbar>\n \n-<ion-content #content padding>\n+<ion-content padding>\n \n <h3>Page 2</h3>\n \n <p>\n- <button menu-toggle class=\"e2eContentToggleMenu\">Toggle Menu</button>\n+ <button menu-toggle=\"leftMenu\" class=\"e2eContentToggleMenu\">Toggle Left Menu</button>\n </p>\n \n <p>\ndiff --git a/ionic/components/menu/test/basic/page3.html b/ionic/components/menu/test/basic/page3.html\nindex a2d65e2..079a3e9 100644\n--- a/ionic/components/menu/test/basic/page3.html\n+++ b/ionic/components/menu/test/basic/page3.html\n@@ -1,7 +1,7 @@\n \n <ion-navbar *navbar>\n \n- <a menu-toggle>\n+ <a menu-toggle=\"leftMenu\">\n <icon menu></icon>\n </a>\n \n@@ -12,12 +12,12 @@\n </ion-navbar>\n \n \n-<ion-content #content padding>\n+<ion-content padding>\n \n <h3>Page 3</h3>\n \n <p>\n- <button menu-toggle>Toggle Menu</button>\n+ <button menu-toggle=\"leftMenu\">Toggle Left Menu</button>\n </p>\n \n <f></f><f></f><f></f><f></f><f></f><f></f><f></f><f></f>\ndiff --git a/ionic/components/toolbar/modes/md.scss b/ionic/components/toolbar/modes/md.scss\nindex 984e758..339169a 100644\n--- a/ionic/components/toolbar/modes/md.scss\n+++ b/ionic/components/toolbar/modes/md.scss\n@@ -43,6 +43,12 @@ $toolbar-md-button-font-size: 1.4rem !default;\n }\n }\n \n+ [menu-toggle][secondary],\n+ [menu-toggle][secondary].activated {\n+ margin: 0 2px;\n+ min-width: 28px;\n+ }\n+\n }\n \n ion-title {\n"]
4
["cbe62140ce219da84772e21e7cfb4b5c2a25c1b8", "14abf5c31523a551134aebe9e8f3505ef26ed421", "c3531347fe5a4cc82d426db195026a5bdad15e7a", "1a60540f2bcda48d33f015e31f3728ac2c59a159"]
["docs", "refactor", "ci", "feat"]
fixed start types for size and opacity,add test case with multiple partitions for message,auto focus inputs in survey form,remove unnecessary lines from verify-wal test
["diff --git a/core/main/src/Core/Particle.ts b/core/main/src/Core/Particle.ts\nindex 1aa6fba..6ea6ffc 100644\n--- a/core/main/src/Core/Particle.ts\n+++ b/core/main/src/Core/Particle.ts\n@@ -271,7 +271,7 @@ export class Particle implements IParticle {\n }\n }\n \n- const sizeAnimation = this.options.size.animation;\n+ const sizeAnimation = sizeOptions.animation;\n \n if (sizeAnimation.enable) {\n this.size.status = AnimationStatus.increasing;\n@@ -279,7 +279,8 @@ export class Particle implements IParticle {\n if (!randomSize) {\n switch (sizeAnimation.startValue) {\n case StartValueType.min:\n- this.size.value = sizeAnimation.minimumValue * pxRatio;\n+ this.size.value = NumberUtils.getRangeMin(sizeOptions.value) * pxRatio;\n+ this.size.status = AnimationStatus.increasing;\n \n break;\n \n@@ -287,11 +288,14 @@ export class Particle implements IParticle {\n this.size.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(sizeAnimation.minimumValue * pxRatio, this.size.value)\n );\n+ this.size.status =\n+ Math.random() >= 0.5 ? AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.size.value = NumberUtils.getRangeMax(sizeOptions.value) * pxRatio;\n this.size.status = AnimationStatus.decreasing;\n \n break;\n@@ -393,7 +397,8 @@ export class Particle implements IParticle {\n if (!randomOpacity) {\n switch (opacityAnimation.startValue) {\n case StartValueType.min:\n- this.opacity.value = opacityAnimation.minimumValue;\n+ this.opacity.value = NumberUtils.getRangeMin(this.opacity.value);\n+ this.opacity.status = AnimationStatus.increasing;\n \n break;\n \n@@ -401,11 +406,14 @@ export class Particle implements IParticle {\n this.opacity.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(opacityAnimation.minimumValue, this.opacity.value)\n );\n+ this.opacity.status =\n+ Math.random() >= 0.5 ? 
AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.opacity.value = NumberUtils.getRangeMax(this.opacity.value);\n this.opacity.status = AnimationStatus.decreasing;\n \n break;\ndiff --git a/presets/confetti/src/options.ts b/presets/confetti/src/options.ts\nindex 7fc6225..a713425 100644\n--- a/presets/confetti/src/options.ts\n+++ b/presets/confetti/src/options.ts\n@@ -28,7 +28,7 @@ export const loadOptions = (confettiOptions: RecursivePartial<IConfettiOptions>)\n animation: {\n enable: true,\n minimumValue: 0,\n- speed: 2,\n+ speed: 0.5,\n startValue: \"max\",\n destroy: \"min\",\n },\n", "diff --git a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\nindex 693d1da..e3552d4 100644\n--- a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n+++ b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n@@ -74,7 +74,7 @@ public class SubscriptionCommandSender {\n new CloseWorkflowInstanceSubscriptionCommand();\n \n private final ClientTransport subscriptionClient;\n- private final IntArrayList partitionIds;\n+ private final IntArrayList partitionIds = new IntArrayList();\n \n private int partitionId;\n private TopologyPartitionListenerImpl partitionListener;\n@@ -82,7 +82,6 @@ public class SubscriptionCommandSender {\n public SubscriptionCommandSender(\n final ClusterCfg clusterCfg, final ClientTransport subscriptionClient) {\n this.subscriptionClient = subscriptionClient;\n- partitionIds = new IntArrayList();\n partitionIds.addAll(clusterCfg.getPartitionIds());\n }\n \n@@ -100,7 +99,8 @@ public class SubscriptionCommandSender {\n final DirectBuffer messageName,\n final DirectBuffer correlationKey) {\n \n- final int subscriptionPartitionId = getSubscriptionPartitionId(correlationKey);\n+ final int subscriptionPartitionId =\n+ SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n \n openMessageSubscriptionCommand.setSubscriptionPartitionId(subscriptionPartitionId);\n openMessageSubscriptionCommand.setWorkflowInstanceKey(workflowInstanceKey);\n@@ -111,14 +111,6 @@ public class SubscriptionCommandSender {\n return sendSubscriptionCommand(subscriptionPartitionId, openMessageSubscriptionCommand);\n }\n \n- private int getSubscriptionPartitionId(final DirectBuffer correlationKey) {\n- if (partitionIds == null) {\n- throw new IllegalStateException(\"no partition ids available\");\n- }\n-\n- return SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n- }\n-\n public boolean openWorkflowInstanceSubscription(\n final long workflowInstanceKey,\n final long elementInstanceKey,\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\nindex 4baed4f..838c9ca 100644\n--- a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n@@ -36,7 +36,6 @@ import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.BpmnModelInstance;\n import io.zeebe.protocol.clientapi.RecordType;\n import io.zeebe.protocol.clientapi.ValueType;\n-import io.zeebe.protocol.impl.SubscriptionUtil;\n import 
io.zeebe.protocol.intent.DeploymentIntent;\n import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n@@ -44,7 +43,6 @@ import io.zeebe.protocol.intent.WorkflowInstanceSubscriptionIntent;\n import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n import io.zeebe.test.util.record.RecordingExporter;\n-import io.zeebe.util.buffer.BufferUtil;\n import java.util.List;\n import java.util.stream.Collectors;\n import org.agrona.DirectBuffer;\n@@ -171,39 +169,6 @@ public class MessageCatchElementTest {\n }\n \n @Test\n- public void shouldOpenMessageSubscriptionsOnSamePartition() {\n- // given\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n-\n- final String correlationKey = \"order-123\";\n-\n- final PartitionTestClient workflowPartition = apiRule.partitionClient(partitionIds.get(0));\n- final PartitionTestClient subscriptionPartition =\n- apiRule.partitionClient(getPartitionId(correlationKey));\n-\n- testClient.deploy(CATCH_EVENT_WORKFLOW);\n-\n- // when\n- final long workflowInstanceKey1 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- final long workflowInstanceKey2 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- // then\n- final List<Record<MessageSubscriptionRecordValue>> subscriptions =\n- subscriptionPartition\n- .receiveMessageSubscriptions()\n- .withIntent(MessageSubscriptionIntent.OPENED)\n- .limit(2)\n- .collect(Collectors.toList());\n-\n- assertThat(subscriptions)\n- .extracting(s -> s.getValue().getWorkflowInstanceKey())\n- .contains(workflowInstanceKey1, workflowInstanceKey2);\n- }\n-\n- @Test\n public void shouldOpenWorkflowInstanceSubscription() {\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", \"order-123\"));\n@@ -352,10 +317,4 @@ public class MessageCatchElementTest {\n .exists())\n .isTrue();\n }\n-\n- private int getPartitionId(final String correlationKey) {\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n- return SubscriptionUtil.getSubscriptionPartitionId(\n- BufferUtil.wrapString(correlationKey), partitionIds.size());\n- }\n }\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 0000000..cf8261a\n--- /dev/null\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,134 @@\n+/*\n+ * Zeebe Broker Core\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * This program is free software: you can redistribute it and/or modify\n+ * it under the terms of the GNU Affero General Public License as published by\n+ * the Free Software Foundation, either version 3 of the License, or\n+ * (at your option) any later version.\n+ *\n+ * This program is distributed in the hope that it will be useful,\n+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\n+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n+ * GNU Affero General Public License for more details.\n+ *\n+ * You should have received a copy of the GNU Affero General Public License\n+ * along with this program. 
If not, see <http://www.gnu.org/licenses/>.\n+ */\n+package io.zeebe.broker.workflow.message;\n+\n+import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static io.zeebe.test.util.MsgPackUtil.asMsgPack;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.impl.SubscriptionUtil;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n+import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import io.zeebe.util.buffer.BufferUtil;\n+import java.util.List;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = \"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"receive-message\")\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+\n+ public ClientApiRule apiRule = new ClientApiRule(brokerRule::getClientAddress);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(apiRule);\n+\n+ private PartitionTestClient testClient;\n+\n+ @Before\n+ public void init() {\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_0)).isEqualTo(0);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_1)).isEqualTo(1);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_2)).isEqualTo(2);\n+\n+ testClient = apiRule.partitionClient();\n+\n+ testClient.deploy(WORKFLOW);\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ apiRule\n+ .partitionClient(0)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_0, asMsgPack(\"p\", \"p0\"));\n+ apiRule\n+ .partitionClient(1)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_1, asMsgPack(\"p\", \"p1\"));\n+ apiRule\n+ .partitionClient(2)\n+ 
.publishMessage(\"message\", CORRELATION_KEY_PARTITION_2, asMsgPack(\"p\", \"p2\"));\n+\n+ // when\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ private int getPartitionId(final String correlationKey) {\n+ final List<Integer> partitionIds = apiRule.getPartitionIds();\n+ return SubscriptionUtil.getSubscriptionPartitionId(\n+ BufferUtil.wrapString(correlationKey), partitionIds.size());\n+ }\n+}\ndiff --git a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\nindex dac11a2..e2b8397 100644\n--- a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n+++ b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n@@ -329,6 +329,7 @@ public class PartitionTestClient {\n final String messageName, final String correlationKey, final byte[] payload, final long ttl) {\n return apiRule\n .createCmdRequest()\n+ .partitionId(partitionId)\n .type(ValueType.MESSAGE, MessageIntent.PUBLISH)\n .command()\n .put(\"name\", messageName)\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\nindex 9a122d9..b7db67e 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n@@ -619,14 +619,9 @@ public class BrokerReprocessingTest {\n }\n \n @Test\n- public void shouldCorrelateMessageAfterRestartIfEnteredBeforeA() throws Exception {\n+ public void shouldCorrelateMessageAfterRestartIfEnteredBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n final long workflowInstanceKey =\n startWorkflowInstance(PROCESS_ID, singletonMap(\"orderId\", \"order-123\"))\n@@ -658,12 +653,7 @@ public class BrokerReprocessingTest {\n @Test\n public void shouldCorrelateMessageAfterRestartIfPublishedBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n publishMessage(\"order canceled\", \"order-123\", singletonMap(\"foo\", \"bar\"));\n reprocessingTrigger.accept(this);\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\ndeleted file mode 100644\nindex c6a05fb..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\n+++ /dev/null\n@@ -1,176 +0,0 @@\n-/*\n- * Copyright \u00a9 
2017 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-import org.junit.runner.RunWith;\n-import org.junit.runners.Parameterized;\n-import org.junit.runners.Parameterized.Parameter;\n-import org.junit.runners.Parameterized.Parameters;\n-\n-@RunWith(Parameterized.class)\n-public class MessageCorrelationTest {\n-\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private static final BpmnModelInstance CATCH_EVENT_WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- private static final BpmnModelInstance RECEIVE_TASK_WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .receiveTask(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- @Parameter(0)\n- public String elementType;\n-\n- @Parameter(1)\n- public BpmnModelInstance workflow;\n-\n- @Parameters(name = \"{0}\")\n- public static final Object[][] parameters() {\n- return new Object[][] {\n- {\"intermediate message catch event\", CATCH_EVENT_WORKFLOW},\n- {\"receive task\", RECEIVE_TASK_WORKFLOW}\n- };\n- }\n-\n- @Before\n- public void init() {\n- final DeploymentEvent deploymentEvent =\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(workflow, \"wf.bpmn\")\n- .send()\n- .join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfEnteredBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"receive-message\");\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- 
.newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfPublishedBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageAndMergePayload() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .payload(Collections.singletonMap(\"foo\", \"bar\"))\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n-\n- assertElementCompleted(\n- \"wf\",\n- \"receive-message\",\n- (catchEventOccurredEvent) ->\n- assertThat(catchEventOccurredEvent.getPayloadAsMap())\n- .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\ndeleted file mode 100644\nindex 7845eec..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\n+++ /dev/null\n@@ -1,234 +0,0 @@\n-/*\n- * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.assertThatThrownBy;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.ZeebeFuture;\n-import io.zeebe.client.api.clients.WorkflowClient;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.client.api.events.WorkflowInstanceEvent;\n-import io.zeebe.client.cmd.ClientException;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import 
java.time.Duration;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-\n-public class PublishMessageTest {\n-\n- private static final BpmnModelInstance WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"catch-event\")\n- .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .endEvent()\n- .done();\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private WorkflowClient workflowClient;\n-\n- @Before\n- public void init() {\n-\n- workflowClient = clientRule.getClient().workflowClient();\n-\n- final DeploymentEvent deploymentEvent =\n- workflowClient.newDeployCommand().addWorkflowModel(WORKFLOW, \"wf.bpmn\").send().join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageToAllSubscriptions() {\n- // given\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageWithZeroTTL() {\n- // given\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"catch-event\");\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ZERO)\n- .send()\n- .join();\n-\n- // then\n- assertElementCompleted(\"wf\", \"catch-event\");\n- }\n-\n- @Test\n- public void shouldNotCorrelateMessageAfterTTL() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ZERO)\n- .payload(Collections.singletonMap(\"msg\", \"failure\"))\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ofMinutes(1))\n- .payload(Collections.singletonMap(\"msg\", \"expected\"))\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n-\n- assertElementCompleted(\n- \"wf\",\n- \"catch-event\",\n- (catchEventOccurred) ->\n- assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", \"expected\")));\n- }\n-\n- @Test\n- public void shouldCorrelateMessageOnDifferentPartitions() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order 
canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-124\")\n- .send()\n- .join();\n-\n- // when\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-124\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldRejectMessageWithSameId() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send()\n- .join();\n-\n- // when\n- final ZeebeFuture<Void> future =\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send();\n-\n- // then\n- assertThatThrownBy(future::join)\n- .isInstanceOf(ClientException.class)\n- .hasMessageContaining(\"message with id 'foo' is already published\");\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 0000000..0e37c95\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,196 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.intent.MessageIntent;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import java.util.Collections;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = 
\"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent()\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldPublishMessageOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ publishMessage(CORRELATION_KEY_PARTITION_0, Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+ });\n+\n+ // then\n+ assertThat(RecordingExporter.messageRecords(MessageIntent.PUBLISHED).limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ publishMessage(CORRELATION_KEY_PARTITION_0, Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+\n+ // when\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnSamePartitionsAfterRestart() {\n+ // 
given\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(15)\n+ .exists())\n+ .isTrue();\n+\n+ // when\n+ brokerRule.stopBroker();\n+ brokerRule.startBroker();\n+\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ private void createWorkflowInstance(Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+\n+ private void publishMessage(String correlationKey, Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"message\")\n+ .correlationKey(correlationKey)\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\nnew file mode 100644\nindex 0000000..3b08572\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\n@@ -0,0 +1,198 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.assertThatThrownBy;\n+import static org.assertj.core.api.Assertions.entry;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.ZeebeFuture;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.client.cmd.ClientException;\n+import 
io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import java.time.Duration;\n+import java.util.Collections;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationTest {\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"catch-event\")\n+ .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n+ .endEvent()\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessage() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .payload(Collections.singletonMap(\"foo\", \"bar\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertWorkflowInstanceCompleted(PROCESS_ID);\n+\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurredEvent) ->\n+ assertThat(catchEventOccurredEvent.getPayloadAsMap())\n+ .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageWithZeroTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ assertElementActivated(\"catch-event\");\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(PROCESS_ID, \"catch-event\");\n+ }\n+\n+ @Test\n+ public void shouldNotCorrelateMessageAfterTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .payload(Collections.singletonMap(\"msg\", \"failure\"))\n+ .send()\n+ .join();\n+\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ofMinutes(1))\n+ .payload(Collections.singletonMap(\"msg\", \"expected\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurred) ->\n+ assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", 
\"expected\")));\n+ }\n+\n+ @Test\n+ public void shouldRejectMessageWithSameId() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send()\n+ .join();\n+\n+ // when\n+ final ZeebeFuture<Void> future =\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send();\n+\n+ // then\n+ assertThatThrownBy(future::join)\n+ .isInstanceOf(ClientException.class)\n+ .hasMessageContaining(\"message with id 'foo' is already published\");\n+ }\n+}\n", "diff --git a/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue b/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\nindex b2a90d8..dbad824 100644\n--- a/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\n+++ b/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\n@@ -6,6 +6,7 @@ import {\n DropZoneRef,\n computed,\n onKeyStroke,\n+ onMounted,\n provide,\n ref,\n useEventListener,\n@@ -85,6 +86,8 @@ function transition(direction: TransitionDirection) {\n \n setTimeout(() => {\n isTransitioning.value = false\n+\n+ setTimeout(focusInput, 100)\n }, 1000)\n }\n \n@@ -113,6 +116,19 @@ async function goPrevious() {\n goToPrevious()\n }\n \n+function focusInput() {\n+ if (document && typeof document !== 'undefined') {\n+ const inputEl =\n+ (document.querySelector('.nc-cell input') as HTMLInputElement) ||\n+ (document.querySelector('.nc-cell textarea') as HTMLTextAreaElement)\n+\n+ if (inputEl) {\n+ inputEl.select()\n+ inputEl.focus()\n+ }\n+ }\n+}\n+\n useEventListener('wheel', (event) => {\n if (Math.abs(event.deltaX) < Math.abs(event.deltaY)) {\n // Scrolling more vertically than horizontally\n@@ -130,6 +146,8 @@ useEventListener('wheel', (event) => {\n \n onKeyStroke(['ArrowLeft', 'ArrowDown'], goPrevious)\n onKeyStroke(['ArrowRight', 'ArrowUp', 'Enter', 'Space'], goNext)\n+\n+onMounted(focusInput)\n </script>\n \n <template>\n", "diff --git a/storage/wal/verifier_test.go b/storage/wal/verifier_test.go\nindex 61e1536..a44755f 100644\n--- a/storage/wal/verifier_test.go\n+++ b/storage/wal/verifier_test.go\n@@ -138,22 +138,13 @@ func writeCorruptEntries(file *os.File, t *testing.T, n int) {\n \t\t}\n \t}\n \n-\n \t// Write some random bytes to the file to simulate corruption.\n \tif _, err := file.Write(corruption); err != nil {\n \t\tfatal(t, \"corrupt WAL segment\", err)\n \t}\n-\tcorrupt := []byte{1, 255, 0, 3, 45, 26, 110}\n-\n-\twrote, err := file.Write(corrupt)\n-\tif err != nil {\n-\t\tt.Fatal(err)\n-\t} else if wrote != len(corrupt) {\n-\t\tt.Fatal(\"Error writing corrupt data to file\")\n-\t}\n \n \tif err := file.Close(); err != nil {\n-\t\tt.Fatalf(\"Error: filed to close file: %v\\n\", err)\n+\t\tt.Fatalf(\"Error: failed to close file: %v\\n\", err)\n \t}\n }\n \n"]
4
["06960183db42cba1b1f1a8077660ba8c801c9e18", "2d416be63eeec9e7fdb90a62c40c8ad8f0672efa", "5373c3036866db58b322b424d3be9dedff57a376", "fba4326c72fc22d81aba6976a9fef1e4b6154fd9"]
["fix", "test", "feat", "refactor"]
fix deploying to Kubernetes Signed-off-by: Rajesh Rajendran <[email protected]>,remove unnecessary spotless definition It receives this already from the parent pom.,tests should pass now Make the code more safe,add link to roadmap,create DashboardDetails
["diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml\nindex 7e42967..77e4abf 100644\n--- a/.github/workflows/frontend.yaml\n+++ b/.github/workflows/frontend.yaml\n@@ -22,26 +22,22 @@ jobs:\n ${{ runner.OS }}-build-\n ${{ runner.OS }}-\n \n+ - uses: azure/k8s-set-context@v1\n+ with:\n+ method: kubeconfig\n+ kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.\n+ id: setcontext\n - name: Install\n run: npm install\n \n- - name: Build\n- run: npm run build:staging\n- env:\n- ENVIRONMENT: staging\n-\n- - name: Deploy\n- env:\n- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}\n- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}\n- AWS_REGION: eu-central-1\n- AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}\n+ - name: Build and deploy\n run: |\n- aws configure set default.s3.signature_version s3v4\n- aws --endpoint-url https://${{secrets.DOMAIN_NAME}}/frontend/ s3 cp \\\n- --recursive \\\n- --region \"$AWS_REGION\" \\\n- public s3://$AWS_S3_BUCKET_NAME\n+ cd frontend\n+ bash build.sh\n+ cp -arl public frontend\n+ minio_pod=$(kubectl get po -n db -l app.kubernetes.io/name=minio -n db --output custom-columns=name:.metadata.name | tail -n+2)\n+ kubectl -n db cp frontend $minio_pod:/data/\n+ rm -rf frontend\n \n # - name: Debug Job\n # if: ${{ failure() }}\n", "diff --git a/benchmarks/project/pom.xml b/benchmarks/project/pom.xml\nindex 62030b6..ab87dea 100644\n--- a/benchmarks/project/pom.xml\n+++ b/benchmarks/project/pom.xml\n@@ -123,11 +123,6 @@\n </plugin>\n \n <plugin>\n- <groupId>com.diffplug.spotless</groupId>\n- <artifactId>spotless-maven-plugin</artifactId>\n- </plugin>\n-\n- <plugin>\n <groupId>org.apache.maven.plugins</groupId>\n <artifactId>maven-shade-plugin</artifactId>\n <executions>\n", "diff --git a/goreleaserlib/goreleaser.go b/goreleaserlib/goreleaser.go\nindex 28ba3f4..67ba95d 100644\n--- a/goreleaserlib/goreleaser.go\n+++ b/goreleaserlib/goreleaser.go\n@@ -27,6 +27,15 @@ import (\n \tyaml \"gopkg.in/yaml.v2\"\n )\n \n+var (\n+\tnormalPadding = cli.Default.Padding\n+\tincreasedPadding = normalPadding * 2\n+)\n+\n+func init() {\n+\tlog.SetHandler(cli.Default)\n+}\n+\n var pipes = []pipeline.Piper{\n \tdefaults.Pipe{}, // load default configs\n \tgit.Pipe{}, // get and validate git repo state\n@@ -89,17 +98,15 @@ func Release(flags Flags) error {\n \t\tctx.Publish = false\n \t}\n \tctx.RmDist = flags.Bool(\"rm-dist\")\n-\tlogger, _ := log.Log.(*log.Logger)\n-\thandler, _ := logger.Handler.(*cli.Handler)\n \tfor _, pipe := range pipes {\n-\t\thandler.Padding = 3\n+\t\tcli.Default.Padding = normalPadding\n \t\tlog.Infof(\"\\033[1m%s\\033[0m\", strings.ToUpper(pipe.String()))\n-\t\thandler.Padding = 6\n+\t\tcli.Default.Padding = increasedPadding\n \t\tif err := handle(pipe.Run(ctx)); err != nil {\n \t\t\treturn err\n \t\t}\n \t}\n-\thandler.Padding = 3\n+\tcli.Default.Padding = normalPadding\n \treturn nil\n }\n \ndiff --git a/main.go b/main.go\nindex b9b961d..7ced1dd 100644\n--- a/main.go\n+++ b/main.go\n@@ -18,7 +18,7 @@ var (\n )\n \n func init() {\n-\tlog.SetHandler(lcli.New(os.Stdout))\n+\tlog.SetHandler(lcli.Default)\n }\n \n func main() {\n", "diff --git a/packages/plugin-core/README.md b/packages/plugin-core/README.md\nindex 3c25c9b..c7506d4 100644\n--- a/packages/plugin-core/README.md\n+++ b/packages/plugin-core/README.md\n@@ -187,6 +187,10 @@ When the workspace opens, it will show dialogue to install the recommended exten\n \n See [[FAQ]] to answers for common questions.\n \n+# Roadmap\n+\n+Check out 
our [public roadmap](https://github.com/orgs/dendronhq/projects/1) to see the features we're working on and to vote for what you want to see next. \n+\n \n # Contributing\n \n", "diff --git a/client/src/components/MentorSearch.tsx b/client/src/components/MentorSearch.tsx\nindex 622560a..06f0114 100644\n--- a/client/src/components/MentorSearch.tsx\n+++ b/client/src/components/MentorSearch.tsx\n@@ -7,8 +7,9 @@ type Props = UserProps & {\n };\n \n export function MentorSearch(props: Props) {\n- const courseService = useMemo(() => new CourseService(props.courseId), [props.courseId]);\n+ const { courseId, ...otherProps } = props;\n+ const courseService = useMemo(() => new CourseService(courseId), [courseId]);\n const handleSearch = useCallback(async (value: string) => courseService.searchMentors(value), [courseService]);\n \n- return <UserSearch {...props} searchFn={handleSearch} />;\n+ return <UserSearch {...otherProps} searchFn={handleSearch} />;\n }\ndiff --git a/client/src/components/Student/DashboardDetails.tsx b/client/src/components/Student/DashboardDetails.tsx\nnew file mode 100644\nindex 0000000..30506ef\n--- /dev/null\n+++ b/client/src/components/Student/DashboardDetails.tsx\n@@ -0,0 +1,89 @@\n+import { BranchesOutlined, CloseCircleTwoTone, SolutionOutlined, UndoOutlined } from '@ant-design/icons';\n+import { Button, Descriptions, Drawer } from 'antd';\n+import { CommentModal, MentorSearch } from 'components';\n+import { useState } from 'react';\n+import { StudentDetails } from 'services/course';\n+import { MentorBasic } from '../../../../common/models';\n+import css from 'styled-jsx/css';\n+\n+type Props = {\n+ details: StudentDetails | null;\n+ courseId: number;\n+ onClose: () => void;\n+ onCreateRepository: () => void;\n+ onRestoreStudent: () => void;\n+ onExpelStudent: (comment: string) => void;\n+ onIssueCertificate: () => void;\n+ onUpdateMentor: (githubId: string) => void;\n+};\n+\n+export function DashboardDetails(props: Props) {\n+ const [expelMode, setExpelMode] = useState(false);\n+ const { details } = props;\n+ if (details == null) {\n+ return null;\n+ }\n+ return (\n+ <>\n+ <Drawer\n+ width={600}\n+ title={`${details.name} , ${details.githubId}`}\n+ placement=\"right\"\n+ closable={false}\n+ onClose={props.onClose}\n+ visible={!!details}\n+ >\n+ <div className=\"student-details-actions\">\n+ <Button\n+ disabled={!details.isActive || !!details.repository}\n+ icon={<BranchesOutlined />}\n+ onClick={props.onCreateRepository}\n+ >\n+ Create Repository\n+ </Button>\n+ <Button disabled={!details.isActive} icon={<SolutionOutlined />} onClick={props.onIssueCertificate}>\n+ Issue Certificate\n+ </Button>\n+ <Button\n+ hidden={!details.isActive}\n+ icon={<CloseCircleTwoTone twoToneColor=\"red\" />}\n+ onClick={() => setExpelMode(true)}\n+ >\n+ Expel\n+ </Button>\n+ <Button hidden={details.isActive} icon={<UndoOutlined />} onClick={props.onRestoreStudent}>\n+ Restore\n+ </Button>\n+ <Descriptions bordered layout=\"vertical\" size=\"small\" column={1}>\n+ <Descriptions.Item label=\"Mentor\">\n+ <MentorSearch\n+ style={{ width: '100%' }}\n+ onChange={props.onUpdateMentor}\n+ courseId={props.courseId}\n+ keyField=\"githubId\"\n+ value={(details.mentor as MentorBasic)?.githubId}\n+ defaultValues={details.mentor ? 
[details.mentor as any] : []}\n+ />\n+ </Descriptions.Item>\n+ </Descriptions>\n+ </div>\n+ </Drawer>\n+ <CommentModal\n+ title=\"Expelling Reason\"\n+ visible={expelMode}\n+ onCancel={() => setExpelMode(false)}\n+ onOk={(text: string) => {\n+ props.onExpelStudent(text);\n+ setExpelMode(false);\n+ }}\n+ />\n+ <style jsx>{styles}</style>\n+ </>\n+ );\n+}\n+\n+const styles = css`\n+ .student-details-actions :global(.ant-btn) {\n+ margin: 0 8px 8px 0;\n+ }\n+`;\ndiff --git a/client/src/components/Student/index.ts b/client/src/components/Student/index.ts\nindex 71e28de..076f0e2 100644\n--- a/client/src/components/Student/index.ts\n+++ b/client/src/components/Student/index.ts\n@@ -1 +1,2 @@\n export { default as AssignStudentModal } from './AssignStudentModal';\n+export { DashboardDetails } from './DashboardDetails';\ndiff --git a/client/src/components/StudentSearch.tsx b/client/src/components/StudentSearch.tsx\nindex 5952aed..7c14263 100644\n--- a/client/src/components/StudentSearch.tsx\n+++ b/client/src/components/StudentSearch.tsx\n@@ -7,8 +7,9 @@ type Props = UserProps & {\n };\n \n export function StudentSearch(props: Props) {\n- const courseService = useMemo(() => new CourseService(props.courseId), [props.courseId]);\n+ const { courseId, ...otherProps } = props;\n+ const courseService = useMemo(() => new CourseService(courseId), [courseId]);\n const handleSearch = useCallback(async (value: string) => courseService.searchStudents(value), [courseService]);\n \n- return <UserSearch {...props} searchFn={handleSearch} />;\n+ return <UserSearch {...otherProps} searchFn={handleSearch} />;\n }\ndiff --git a/client/src/components/UserSearch.tsx b/client/src/components/UserSearch.tsx\nindex ff95941..4075827 100644\n--- a/client/src/components/UserSearch.tsx\n+++ b/client/src/components/UserSearch.tsx\n@@ -14,7 +14,7 @@ export type UserProps = SelectProps<string> & {\n \n export function UserSearch(props: UserProps) {\n const [data, setData] = useState<Person[]>([]);\n- const { searchFn = defaultSearch, defaultValues } = props;\n+ const { searchFn = defaultSearch, defaultValues, keyField, ...otherProps } = props;\n \n useEffect(() => {\n setData(defaultValues ?? 
[]);\n@@ -29,7 +29,6 @@ export function UserSearch(props: UserProps) {\n }\n };\n \n- const { keyField, ...otherProps } = props;\n return (\n <Select\n {...otherProps}\ndiff --git a/client/src/pages/course/admin/students.tsx b/client/src/pages/course/admin/students.tsx\nindex c15dee4..220ef54 100644\n--- a/client/src/pages/course/admin/students.tsx\n+++ b/client/src/pages/course/admin/students.tsx\n@@ -2,15 +2,13 @@ import {\n BranchesOutlined,\n CheckCircleTwoTone,\n ClockCircleTwoTone,\n- CloseCircleTwoTone,\n FileExcelOutlined,\n MinusCircleOutlined,\n- SolutionOutlined,\n- UndoOutlined,\n } from '@ant-design/icons';\n-import { Button, Drawer, message, Row, Statistic, Switch, Table, Typography, Descriptions } from 'antd';\n+import { Button, message, Row, Statistic, Switch, Table, Typography } from 'antd';\n import { ColumnProps } from 'antd/lib/table/Column';\n-import { CommentModal, PageLayout, withSession, MentorSearch } from 'components';\n+import { PageLayout, withSession } from 'components';\n+import { DashboardDetails } from 'components/Student';\n import {\n boolIconRenderer,\n boolSorter,\n@@ -21,14 +19,12 @@ import {\n } from 'components/Table';\n import { useLoading } from 'components/useLoading';\n import withCourseData from 'components/withCourseData';\n+import { isCourseManager } from 'domain/user';\n import _ from 'lodash';\n import { useMemo, useState } from 'react';\n import { useAsync } from 'react-use';\n-import { isCourseManager } from 'domain/user';\n import { CourseService, StudentDetails } from 'services/course';\n import { CoursePageProps } from 'services/models';\n-import css from 'styled-jsx/css';\n-import { MentorBasic } from '../../../../../common/models';\n \n const { Text } = Typography;\n \n@@ -39,7 +35,6 @@ function Page(props: Props) {\n const courseId = props.course.id;\n \n const [loading, withLoading] = useLoading(false);\n- const [expelMode, setExpelMode] = useState(false);\n const [isManager] = useState(isCourseManager(props.session, props.course.id));\n const courseService = useMemo(() => new CourseService(courseId), [courseId]);\n const [students, setStudents] = useState([] as StudentDetails[]);\n@@ -77,7 +72,6 @@ function Page(props: Props) {\n await courseService.expelStudent(githubId, text);\n message.info('Student has been expelled');\n }\n- setExpelMode(false);\n });\n \n const restoreStudent = withLoading(async () => {\n@@ -114,59 +108,20 @@ function Page(props: Props) {\n <div>{renderToolbar()}</div>\n </Row>\n <Table rowKey=\"id\" pagination={{ pageSize: 100 }} size=\"small\" dataSource={students} columns={getColumns()} />\n- <Drawer\n- width={400}\n- title={details ? 
`${details.name} , ${details.githubId}` : ''}\n- placement=\"right\"\n- closable={false}\n+\n+ <DashboardDetails\n+ onUpdateMentor={updateMentor}\n+ onRestoreStudent={restoreStudent}\n+ onIssueCertificate={issueCertificate}\n+ onExpelStudent={expelStudent}\n+ onCreateRepository={createRepository}\n onClose={() => {\n setDetails(null);\n loadStudents();\n }}\n- visible={!!details}\n- >\n- <div className=\"student-details-actions\">\n- <Button\n- disabled={!details?.isActive || !!details.repository}\n- icon={<BranchesOutlined />}\n- onClick={createRepository}\n- >\n- Create Repository\n- </Button>\n- <Button disabled={!details?.isActive} icon={<SolutionOutlined />} onClick={issueCertificate}>\n- Issue Certificate\n- </Button>\n- <Button\n- hidden={!details?.isActive}\n- icon={<CloseCircleTwoTone twoToneColor=\"red\" />}\n- onClick={() => setExpelMode(true)}\n- >\n- Expel\n- </Button>\n- <Button hidden={details?.isActive} icon={<UndoOutlined />} onClick={restoreStudent}>\n- Restore\n- </Button>\n- <Descriptions bordered layout=\"vertical\" size=\"small\" column={1}>\n- <Descriptions.Item label=\"Mentor\">\n- <MentorSearch\n- style={{ width: '100%' }}\n- onChange={updateMentor}\n- courseId={props.course.id}\n- keyField=\"githubId\"\n- value={(details?.mentor as MentorBasic)?.githubId}\n- defaultValues={details?.mentor ? [details?.mentor as any] : []}\n- />\n- </Descriptions.Item>\n- </Descriptions>\n- </div>\n- </Drawer>\n- <CommentModal\n- title=\"Expelling Reason\"\n- visible={expelMode}\n- onCancel={() => setExpelMode(false)}\n- onOk={expelStudent}\n+ details={details}\n+ courseId={props.course.id}\n />\n- <style jsx>{styles}</style>\n </PageLayout>\n );\n }\n@@ -306,14 +261,4 @@ function calculateStats(students: StudentDetails[]) {\n };\n }\n \n-const styles = css`\n- :global(.rs-table-row-disabled) {\n- opacity: 0.25;\n- }\n-\n- .student-details-actions :global(.ant-btn) {\n- margin: 0 8px 8px 0;\n- }\n-`;\n-\n export default withCourseData(withSession(Page));\ndiff --git a/client/src/styles/main.css b/client/src/styles/main.css\nindex 2ccac3c..df3cc8c 100644\n--- a/client/src/styles/main.css\n+++ b/client/src/styles/main.css\n@@ -21,6 +21,10 @@ body,\n display: none;\n }\n \n+.ant-drawer-content-wrapper {\n+ max-width: 85%;\n+}\n+\n .footer-dark.ant-layout-footer {\n background: #000;\n color: #fff;\n"]
5
["3f2eec37f76c1ad9408e423e49fe5bfe3e17d943", "7f9721dc9bbf66a3712d59352f64ca089da139f0", "5636313d7c9cfbd9f48578fd104771d65eae9720", "94202f01e44c58bee4419044f8a18ac5f1a50dff", "fd5f211916c989fddc2ee5afeeb7d46e6a2f51cb"]
["ci", "build", "fix", "docs", "feat"]
set first-attempt to 5s and subsequent-attempt to 180s by default,ecma 7 ready,simplify statement,switch QA to new testbench-1.x-prod In order to use the new Testbench that is compatible with Zeebe 1.x versions, this switches the client id and secrets used by the QA stage.,fix a few issues
["diff --git a/testnet/stacks-node/src/config.rs b/testnet/stacks-node/src/config.rs\nindex 24ca06c..d80f721 100644\n--- a/testnet/stacks-node/src/config.rs\n+++ b/testnet/stacks-node/src/config.rs\n@@ -1414,8 +1414,8 @@ impl MinerConfig {\n pub fn default() -> MinerConfig {\n MinerConfig {\n min_tx_fee: 1,\n- first_attempt_time_ms: 1_000,\n- subsequent_attempt_time_ms: 30_000,\n+ first_attempt_time_ms: 5_000,\n+ subsequent_attempt_time_ms: 180_000,\n microblock_attempt_time_ms: 30_000,\n probability_pick_no_estimate_tx: 5,\n }\n", "diff --git a/config/webpack.config.prod.js b/config/webpack.config.prod.js\nindex f7c6b23..4a00c65 100644\n--- a/config/webpack.config.prod.js\n+++ b/config/webpack.config.prod.js\n@@ -266,7 +266,7 @@ module.exports = {\n : new UglifyJsPlugin({\n uglifyOptions: {\n ie8: false,\n- ecma: 6,\n+ ecma: 7,\n compress: {\n warnings: false,\n // Disabled because of an issue with Uglify breaking seemingly valid code:\n", "diff --git a/src/Object/Merge.ts b/src/Object/Merge.ts\nindex 1f48efb..06caad1 100644\n--- a/src/Object/Merge.ts\n+++ b/src/Object/Merge.ts\n@@ -96,9 +96,11 @@ type ChooseMergeDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _MergeDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? MergeProp<O, O1, K, OOK, style>\n- : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? MergeProp<O, O1, K, OOK, style>\n+ : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\ndiff --git a/src/Object/Patch.ts b/src/Object/Patch.ts\nindex 2d73784..2c8bd42 100644\n--- a/src/Object/Patch.ts\n+++ b/src/Object/Patch.ts\n@@ -89,9 +89,11 @@ type ChoosePatchDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _PatchDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? PatchProp<O, O1, K, OOK>\n- : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? 
PatchProp<O, O1, K, OOK>\n+ : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\n", "diff --git a/Jenkinsfile b/Jenkinsfile\nindex 176ab58..bead402 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -326,7 +326,7 @@ pipeline {\n TAG = \"${env.VERSION}-${env.GIT_COMMIT}\"\n DOCKER_GCR = credentials(\"zeebe-gcr-serviceaccount-json\")\n ZEEBE_AUTHORIZATION_SERVER_URL = 'https://login.cloud.ultrawombat.com/oauth/token'\n- ZEEBE_CLIENT_ID = 'W5a4JUc3I1NIetNnodo3YTvdsRIFb12w'\n+ ZEEBE_CLIENT_ID = 'ELL8eP0qDkl6dxXVps0t51x2VkCkWf~p'\n QA_RUN_VARIABLES = \"{\\\"zeebeImage\\\": \\\"${env.IMAGE}:${env.TAG}\\\", \\\"generationTemplate\\\": \\\"${params.GENERATION_TEMPLATE}\\\", \" +\n \"\\\"channel\\\": \\\"Internal Dev\\\", \\\"branch\\\": \\\"${env.BRANCH_NAME}\\\", \\\"build\\\": \\\"${currentBuild.absoluteUrl}\\\", \" +\n \"\\\"businessKey\\\": \\\"${currentBuild.absoluteUrl}\\\", \\\"processId\\\": \\\"qa-protocol\\\"}\"\n@@ -341,7 +341,7 @@ pipeline {\n withVault(\n [vaultSecrets:\n [\n- [path : 'secret/common/ci-zeebe/testbench-secrets-int',\n+ [path : 'secret/common/ci-zeebe/testbench-secrets-1.x-prod',\n secretValues:\n [\n [envVar: 'ZEEBE_CLIENT_SECRET', vaultKey: 'clientSecret'],\n", "diff --git a/README.md b/README.md\nindex d944d22..5099f03 100644\n--- a/README.md\n+++ b/README.md\n@@ -10,9 +10,8 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n \n <!-- toc -->\n \n-* [Motivation](#motivation)\n+* [Introduction](#introduction)\n * [Installation](#installation)\n- + [Setting up a quick project](#setting-up-a-quick-project)\n * [Usage](#usage)\n + [Creating stores](#creating-stores)\n + [Creating reactive views](#creating-reactive-views)\n@@ -35,12 +34,14 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n Easy State consists of two wrapper functions only. `store` creates state stores and `view` creates reactive components, which re-render whenever state stores are mutated. The rest is just plain JavaScript.\n \n ```js\n-import React, from 'react'\n+import React from 'react'\n import { store, view } from 'react-easy-state'\n \n+// stores are normal objects\n const clock = store({ time: new Date() })\n setInterval(() => clock.time = new Date(), 1000)\n \n+// reactive components re-render on store mutations\n function ClockComp () {\n return <div>{clock.time}</div>\n }\n"]
5
["d35d302cadf355a169dca6636597183de6bbee23", "6aa63c9b8d4dcdbb401743adc3c9a1020d943250", "f86944ff00b970d7e2da48abbff43e58bdf29b99", "c81a0c2999454c859b4bf4da5779712960d239be", "b8a664c1b10f4e30a3e221a14211a3cdaf90b7f4"]
["fix", "build", "refactor", "ci", "docs"]
add clean up test Add another clean up test, which verifies that the state is cleaned up after the timer (non-recurring) is triggered.,run nix macos jobs on macos-13 to try and avoid SIP,don't delay rendering if initialLayout is not specified,use ng2 loadNextToLocation,bundle and tree shake assets with webpack
["diff --git a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\nindex d36b4c9..ca5047f 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n@@ -630,6 +630,40 @@ public final class ProcessExecutionCleanStateTest {\n }\n \n @Test\n+ public void testProcessWithTriggerTimerStartEvent() {\n+ // given\n+ final var deployment =\n+ engineRule\n+ .deployment()\n+ .withXmlResource(\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .timerWithDate(\"=now() + duration(\\\"PT15S\\\")\")\n+ .endEvent()\n+ .done())\n+ .deploy();\n+\n+ final var processDefinitionKey =\n+ deployment.getValue().getProcessesMetadata().get(0).getProcessDefinitionKey();\n+\n+ // when\n+ engineRule.awaitProcessingOf(\n+ RecordingExporter.timerRecords(TimerIntent.CREATED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .getFirst());\n+\n+ engineRule.increaseTime(Duration.ofSeconds(15));\n+\n+ RecordingExporter.processInstanceRecords(ProcessInstanceIntent.ELEMENT_COMPLETED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .withElementType(BpmnElementType.PROCESS)\n+ .await();\n+\n+ // then\n+ assertThatStateIsEmpty();\n+ }\n+\n+ @Test\n public void testProcessWithTimerStartEventRedeployment() {\n // given\n final var deployment =\n", "diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml\nnew file mode 100644\nindex 0000000..5be7d17\n--- /dev/null\n+++ b/.github/actionlint.yaml\n@@ -0,0 +1,7 @@\n+self-hosted-runner:\n+ # Labels of self-hosted runner in array of strings.\n+ labels: [macos-13]\n+# Configuration variables in array of strings defined in your repository or\n+# organization. 
`null` means disabling configuration variables check.\n+# Empty array means no configuration variable is allowed.\n+config-variables: null\ndiff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml\nindex e37346c..dce77e1 100644\n--- a/.github/workflows/nix.yml\n+++ b/.github/workflows/nix.yml\n@@ -37,7 +37,7 @@ jobs:\n - \"3.10\"\n - \"3.11\"\n include:\n- - os: macos-latest\n+ - os: macos-13\n python-version: \"3.10\"\n steps:\n - name: checkout\ndiff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\nindex 005a850..8db22e2 100644\n--- a/.pre-commit-config.yaml\n+++ b/.pre-commit-config.yaml\n@@ -3,7 +3,7 @@ ci:\n autofix_prs: false\n autoupdate_commit_msg: \"chore(deps): pre-commit.ci autoupdate\"\n skip:\n- - actionlint\n+ - actionlint-system\n - deadnix\n - just\n - nixpkgs-fmt\n@@ -17,9 +17,9 @@ default_stages:\n - commit\n repos:\n - repo: https://github.com/rhysd/actionlint\n- rev: v1.6.24\n+ rev: v1.6.25\n hooks:\n- - id: actionlint\n+ - id: actionlint-system\n - repo: https://github.com/psf/black\n rev: 23.3.0\n hooks:\n@@ -30,7 +30,7 @@ repos:\n - id: nbstripout\n exclude: .+/rendered/.+\n - repo: https://github.com/codespell-project/codespell\n- rev: v2.2.4\n+ rev: v2.2.5\n hooks:\n - id: codespell\n additional_dependencies:\n", "diff --git a/packages/react-native-tab-view/example/src/BottomBarIconTextExample.js b/packages/react-native-tab-view/example/src/BottomBarIconTextExample.js\nindex fcc4708..397e6e6 100644\n--- a/packages/react-native-tab-view/example/src/BottomBarIconTextExample.js\n+++ b/packages/react-native-tab-view/example/src/BottomBarIconTextExample.js\n@@ -1,7 +1,7 @@\n /* @flow */\n \n import React, { Component } from 'react';\n-import { Animated, View, Text, Dimensions, StyleSheet } from 'react-native';\n+import { Animated, View, Text, StyleSheet } from 'react-native';\n import { TabViewAnimated, TabBar } from 'react-native-tab-view';\n import { Ionicons } from '@exponent/vector-icons';\n \n@@ -13,7 +13,6 @@ const styles = StyleSheet.create({\n backgroundColor: '#222',\n },\n tab: {\n- opacity: 1,\n padding: 0,\n },\n icon: {\n@@ -50,11 +49,6 @@ const styles = StyleSheet.create({\n },\n });\n \n-const initialLayout = {\n- height: 0,\n- width: Dimensions.get('window').width,\n-};\n-\n export default class TopBarIconExample extends Component {\n \n static title = 'Bottom bar with indicator';\n@@ -80,14 +74,16 @@ export default class TopBarIconExample extends Component {\n };\n \n _renderIndicator = (props) => {\n- const { width, position } = props;\n+ const { width, opacity, position } = props;\n \n- const translateX = Animated.multiply(position, new Animated.Value(width));\n+ const translateX = Animated.multiply(position, width);\n \n return (\n <Animated.View\n- style={[ styles.indicator, { width: width - 8, transform: [ { translateX } ] } ]}\n- />\n+ style={[ styles.container, { width, opacity, transform: [ { translateX } ] } ]}\n+ >\n+ <View style={styles.indicator} />\n+ </Animated.View>\n );\n };\n \n@@ -146,7 +142,6 @@ export default class TopBarIconExample extends Component {\n renderScene={this._renderScene}\n renderFooter={this._renderFooter}\n onRequestChangeTab={this._handleChangeTab}\n- initialLayout={initialLayout}\n />\n );\n }\ndiff --git a/packages/react-native-tab-view/example/src/CoverflowExample.js b/packages/react-native-tab-view/example/src/CoverflowExample.js\nindex 8950c0e..2336591 100644\n--- a/packages/react-native-tab-view/example/src/CoverflowExample.js\n+++ 
b/packages/react-native-tab-view/example/src/CoverflowExample.js\n@@ -2,7 +2,7 @@\n /* eslint-disable import/no-commonjs */\n \n import React, { Component } from 'react';\n-import { Animated, View, Image, Text, Dimensions, StyleSheet } from 'react-native';\n+import { Animated, View, Image, Text, StyleSheet } from 'react-native';\n import { TabViewAnimated, TabViewPagerPan } from 'react-native-tab-view';\n \n const styles = StyleSheet.create({\n@@ -48,11 +48,6 @@ const ALBUMS = {\n 'Lost Horizons': require('../assets/album-art-8.jpg'),\n };\n \n-const initialLayout = {\n- height: 0,\n- width: Dimensions.get('window').width,\n-};\n-\n export default class CoverflowExample extends Component {\n \n static title = 'Coverflow';\n@@ -142,7 +137,6 @@ export default class CoverflowExample extends Component {\n renderPager={this._renderPager}\n renderScene={this._renderScene}\n onRequestChangeTab={this._handleChangeTab}\n- initialLayout={initialLayout}\n />\n );\n }\ndiff --git a/packages/react-native-tab-view/example/src/ScrollViewsExample.js b/packages/react-native-tab-view/example/src/ScrollViewsExample.js\nindex 94fefbb..5be3b69 100644\n--- a/packages/react-native-tab-view/example/src/ScrollViewsExample.js\n+++ b/packages/react-native-tab-view/example/src/ScrollViewsExample.js\n@@ -28,11 +28,6 @@ const styles = StyleSheet.create({\n },\n });\n \n-const initialLayout = {\n- height: 0,\n- width: Dimensions.get('window').width,\n-};\n-\n export default class TopBarTextExample extends Component {\n \n static title = 'Scroll views';\n@@ -104,6 +99,7 @@ export default class TopBarTextExample extends Component {\n renderLabel={this._renderLabel(props)}\n indicatorStyle={styles.indicator}\n tabStyle={styles.tab}\n+ tabWidth={80}\n style={styles.tabbar}\n />\n );\n@@ -130,7 +126,6 @@ export default class TopBarTextExample extends Component {\n renderScene={this._renderScene}\n renderHeader={this._renderHeader}\n onRequestChangeTab={this._handleChangeTab}\n- initialLayout={initialLayout}\n />\n );\n }\ndiff --git a/packages/react-native-tab-view/example/src/TopBarIconExample.js b/packages/react-native-tab-view/example/src/TopBarIconExample.js\nindex d13755f..5464981 100644\n--- a/packages/react-native-tab-view/example/src/TopBarIconExample.js\n+++ b/packages/react-native-tab-view/example/src/TopBarIconExample.js\n@@ -1,7 +1,7 @@\n /* @flow */\n \n import React, { Component } from 'react';\n-import { View, Dimensions, StyleSheet } from 'react-native';\n+import { View, StyleSheet } from 'react-native';\n import { TabViewAnimated, TabBarTop } from 'react-native-tab-view';\n import { Ionicons } from '@exponent/vector-icons';\n \n@@ -22,11 +22,6 @@ const styles = StyleSheet.create({\n },\n });\n \n-const initialLayout = {\n- height: 0,\n- width: Dimensions.get('window').width,\n-};\n-\n export default class TopBarIconExample extends Component {\n \n static title = 'Icon only top bar';\n@@ -93,7 +88,6 @@ export default class TopBarIconExample extends Component {\n renderScene={this._renderScene}\n renderHeader={this._renderHeader}\n onRequestChangeTab={this._handleChangeTab}\n- initialLayout={initialLayout}\n />\n );\n }\ndiff --git a/packages/react-native-tab-view/example/src/TopBarTextExample.js b/packages/react-native-tab-view/example/src/TopBarTextExample.js\nindex 30307ad..454533d 100644\n--- a/packages/react-native-tab-view/example/src/TopBarTextExample.js\n+++ b/packages/react-native-tab-view/example/src/TopBarTextExample.js\n@@ -1,7 +1,7 @@\n /* @flow */\n \n import React, { Component } from 
'react';\n-import { View, Dimensions, StyleSheet } from 'react-native';\n+import { View, StyleSheet } from 'react-native';\n import { TabViewAnimated, TabBarTop } from 'react-native-tab-view';\n \n const styles = StyleSheet.create({\n@@ -25,11 +25,6 @@ const styles = StyleSheet.create({\n },\n });\n \n-const initialLayout = {\n- height: 0,\n- width: Dimensions.get('window').width,\n-};\n-\n export default class TopBarTextExample extends Component {\n \n static title = 'Scrollable top bar';\n@@ -90,7 +85,6 @@ export default class TopBarTextExample extends Component {\n renderScene={this._renderScene}\n renderHeader={this._renderHeader}\n onRequestChangeTab={this._handleChangeTab}\n- initialLayout={initialLayout}\n />\n );\n }\ndiff --git a/packages/react-native-tab-view/src/TabBar.js b/packages/react-native-tab-view/src/TabBar.js\nindex 615e85a..a03d8e5 100644\n--- a/packages/react-native-tab-view/src/TabBar.js\n+++ b/packages/react-native-tab-view/src/TabBar.js\n@@ -92,6 +92,7 @@ type Props = SceneRendererProps & {\n \n type State = {\n offset: Animated.Value;\n+ visibility: Animated.Value;\n }\n \n export default class TabBar extends Component<DefaultProps, Props, State> {\n@@ -115,8 +116,15 @@ export default class TabBar extends Component<DefaultProps, Props, State> {\n \n state: State = {\n offset: new Animated.Value(0),\n+ visibility: new Animated.Value(0),\n };\n \n+ componentWillMount() {\n+ if (this.props.layout.width || this.props.tabWidth) {\n+ this.state.visibility.setValue(1);\n+ }\n+ }\n+\n componentDidMount() {\n this._adjustScroll(this.props.navigationState.index);\n this._positionListener = this.props.subscribe('position', this._adjustScroll);\n@@ -126,6 +134,16 @@ export default class TabBar extends Component<DefaultProps, Props, State> {\n if (this.props.navigationState !== nextProps.navigationState) {\n this._resetScrollOffset(nextProps);\n }\n+\n+ if (\n+ (this.props.tabWidth !== nextProps.tabWidth && nextProps.tabWidth) ||\n+ (this.props.layout.width !== nextProps.layout.width && nextProps.layout.width)\n+ ) {\n+ Animated.timing(this.state.visibility, {\n+ toValue: 1,\n+ duration: 150,\n+ }).start();\n+ }\n }\n \n componentWillUnmount() {\n@@ -282,7 +300,8 @@ export default class TabBar extends Component<DefaultProps, Props, State> {\n {this.props.renderIndicator ?\n this.props.renderIndicator({\n ...this.props,\n- width: tabWidth,\n+ width: new Animated.Value(tabWidth),\n+ opacity: this.state.visibility,\n }) :\n null\n }\n@@ -307,10 +326,10 @@ export default class TabBar extends Component<DefaultProps, Props, State> {\n {routes.map((route, i) => {\n const focused = index === i;\n const outputRange = inputRange.map(inputIndex => inputIndex === i ? 1 : 0.7);\n- const opacity = position.interpolate({\n+ const opacity = Animated.multiply(this.state.visibility, position.interpolate({\n inputRange,\n outputRange,\n- });\n+ }));\n const scene = {\n route,\n focused,\n@@ -348,14 +367,14 @@ export default class TabBar extends Component<DefaultProps, Props, State> {\n }}\n >\n <View style={styles.container}>\n- <Animated.View style={[ styles.tabitem, { opacity, width: tabWidth }, tabStyle, this.props.tabStyle ]}>\n+ <Animated.View style={[ styles.tabitem, { opacity }, tabWidth ? 
{ width: tabWidth } : null, tabStyle, this.props.tabStyle ]}>\n {icon}\n {label}\n </Animated.View>\n {badge ?\n- <View style={styles.badge}>\n+ <Animated.View style={[ styles.badge, { opacity: this.state.visibility } ]}>\n {badge}\n- </View> : null\n+ </Animated.View> : null\n }\n </View>\n </TouchableItem>\ndiff --git a/packages/react-native-tab-view/src/TabBarTop.js b/packages/react-native-tab-view/src/TabBarTop.js\nindex 0960d4e..84dd6e2 100644\n--- a/packages/react-native-tab-view/src/TabBarTop.js\n+++ b/packages/react-native-tab-view/src/TabBarTop.js\n@@ -28,7 +28,8 @@ const styles = StyleSheet.create({\n });\n \n type IndicatorProps = SceneRendererProps & {\n- width: number;\n+ width: Animated.Valye;\n+ opacity: Animated.Value;\n }\n \n type Props = SceneRendererProps & {\n@@ -50,13 +51,13 @@ export default class TabBarTop extends Component<void, Props, void> {\n );\n \n _renderIndicator = (props: IndicatorProps) => {\n- const { width, position } = props;\n+ const { width, opacity, position } = props;\n \n- const translateX = Animated.multiply(position, new Animated.Value(width));\n+ const translateX = Animated.multiply(position, width);\n \n return (\n <Animated.View\n- style={[ styles.indicator, { width, transform: [ { translateX } ] }, this.props.indicatorStyle ]}\n+ style={[ styles.indicator, { width, opacity, transform: [ { translateX } ] }, this.props.indicatorStyle ]}\n />\n );\n };\ndiff --git a/packages/react-native-tab-view/src/TabViewAnimated.js b/packages/react-native-tab-view/src/TabViewAnimated.js\nindex d484816..4499748 100644\n--- a/packages/react-native-tab-view/src/TabViewAnimated.js\n+++ b/packages/react-native-tab-view/src/TabViewAnimated.js\n@@ -94,19 +94,17 @@ export default class TabViewAnimated extends Component<DefaultProps, Props, Stat\n };\n \n _renderItems = (props: SceneRendererProps) => {\n- if (props.layout.width === 0) {\n- return null;\n- }\n-\n const { renderPager, renderHeader, renderFooter } = this.props;\n+ const { navigationState, layout } = props;\n+ const currentRoute = navigationState.routes[navigationState.index];\n \n return (\n <View style={styles.container}>\n {renderHeader && renderHeader(props)}\n {renderPager({\n ...props,\n- children: props.navigationState.routes.map((route, index) => (\n- <View key={route.key} style={{ width: props.layout.width }}>\n+ children: layout.width ? 
navigationState.routes.map((route, index) => (\n+ <View key={route.key} style={{ width: layout.width }}>\n {this._renderScene({\n ...props,\n route,\n@@ -114,7 +112,16 @@ export default class TabViewAnimated extends Component<DefaultProps, Props, Stat\n focused: index === props.navigationState.index,\n })}\n </View>\n- )),\n+ )) : (\n+ <View key={currentRoute.key} style={styles.container}>\n+ {this._renderScene({\n+ ...props,\n+ route: currentRoute,\n+ index: navigationState.index,\n+ focused: true,\n+ })}\n+ </View>\n+ ),\n })}\n {renderFooter && renderFooter(props)}\n </View>\n", "diff --git a/ionic/components/nav/nav-controller.ts b/ionic/components/nav/nav-controller.ts\nindex 8e23c4c..37ac0f4 100644\n--- a/ionic/components/nav/nav-controller.ts\n+++ b/ionic/components/nav/nav-controller.ts\n@@ -527,41 +527,13 @@ export class NavController extends Ion {\n * @private\n * TODO\n */\n- createViewComponentRef(type, hostProtoViewRef, viewContainer, viewCtrlBindings) {\n- let bindings = this.bindings.concat(viewCtrlBindings);\n-\n- // the same guts as DynamicComponentLoader.loadNextToLocation\n- var hostViewRef =\n- viewContainer.createHostView(hostProtoViewRef, viewContainer.length, bindings);\n- var newLocation = this._viewManager.getHostElement(hostViewRef);\n- var component = this._viewManager.getComponent(newLocation);\n-\n- var dispose = () => {\n- var index = viewContainer.indexOf(hostViewRef);\n- if (index !== -1) {\n- viewContainer.remove(index);\n- }\n- };\n-\n- // TODO: make-shift ComponentRef_, this is pretty much going to\n- // break in future versions of ng2, keep an eye on it\n- return {\n- location: newLocation,\n- instance: component,\n- dispose: dispose\n- };\n- }\n-\n- /**\n- * @private\n- * TODO\n- */\n- getBindings(viewCtrl) {\n- // create bindings to this ViewController and its NavParams\n- return this.bindings.concat(Injector.resolve([\n+ loadNextToAnchor(type, location, viewCtrl) {\n+ let bindings = this.bindings.concat(Injector.resolve([\n bind(ViewController).toValue(viewCtrl),\n bind(NavParams).toValue(viewCtrl.params),\n ]));\n+\n+ return this._loader.loadNextToLocation(type, location, bindings);\n }\n \n /**\ndiff --git a/ionic/components/nav/nav.ts b/ionic/components/nav/nav.ts\nindex a98a4ef..063eeb9 100644\n--- a/ionic/components/nav/nav.ts\n+++ b/ionic/components/nav/nav.ts\n@@ -192,65 +192,70 @@ export class Nav extends NavController {\n if (structure.tabs) {\n // the component being loaded is an <ion-tabs>\n // Tabs is essentially a pane, cuz it has its own navbar and content containers\n- let contentContainerRef = this._viewManager.getViewContainer(this.anchorElementRef());\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, contentContainerRef, this.getBindings(viewCtrl));\n- viewComponentRef.instance._paneView = true;\n+ this.loadNextToAnchor(componentType, this.anchorElementRef(), viewCtrl).then(componentRef => {\n \n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ componentRef.instance._paneView = true;\n+\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n+\n+ viewCtrl.onReady().then(() => {\n+ done();\n+ });\n \n- viewCtrl.onReady().then(() => {\n- done();\n });\n \n } else {\n // normal ion-view going into pane\n this.getPane(structure, viewCtrl, (pane) => {\n // add the content of the view into the pane's content area\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, pane.contentContainerRef, this.getBindings(viewCtrl));\n- 
viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n+ this.loadNextToAnchor(componentType, pane.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // remove the pane if there are no view items left\n- pane.totalViews--;\n- if (pane.totalViews === 0) {\n- pane.dispose && pane.dispose();\n- }\n- });\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n \n- // count how many ViewControllers are in this pane\n- pane.totalViews++;\n+ // remove the pane if there are no view items left\n+ pane.totalViews--;\n+ if (pane.totalViews === 0) {\n+ pane.dispose && pane.dispose();\n+ }\n+ });\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ // count how many ViewControllers are in this pane\n+ pane.totalViews++;\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = pane.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = pane.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef && navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ done();\n+ });\n \n- done();\n });\n }\n }\n@@ -273,7 +278,7 @@ export class Nav extends NavController {\n \n } else {\n // create a new nav pane\n- this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.getBindings(viewCtrl)).then(componentRef => {\n+ this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.bindings).then(componentRef => {\n \n // get the pane reference\n pane = this.newPane;\n@@ -354,17 +359,6 @@ export class Nav extends NavController {\n \n /**\n * @private\n- * TODO\n- * @param {TODO} elementBinder TODO\n- * @param {TODO} id TODO\n- * @return {TODO} 
TODO\n- */\n-function isComponent(elementBinder, id) {\n- return (elementBinder && elementBinder.componentDirective && elementBinder.componentDirective.metadata.id == id);\n-}\n-\n-/**\n- * @private\n */\n @Directive({selector: 'template[pane-anchor]'})\n class NavPaneAnchor {\n@@ -393,9 +387,9 @@ class NavBarAnchor {\n class ContentAnchor {\n constructor(\n @Host() @Inject(forwardRef(() => Pane)) pane: Pane,\n- viewContainerRef: ViewContainerRef\n+ elementRef: ElementRef\n ) {\n- pane.contentContainerRef = viewContainerRef;\n+ pane.contentAnchorRef = elementRef;\n }\n }\n \ndiff --git a/ionic/components/tabs/tab.ts b/ionic/components/tabs/tab.ts\nindex aa21cad..af5d190 100644\n--- a/ionic/components/tabs/tab.ts\n+++ b/ionic/components/tabs/tab.ts\n@@ -153,40 +153,44 @@ export class Tab extends NavController {\n \n loadContainer(componentType, hostProtoViewRef, viewCtrl, done) {\n \n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, this.contentContainerRef, this.getBindings(viewCtrl));\n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ this.loadNextToAnchor(componentType, this.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = this.tabs.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = this.tabs.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef && navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ 
done();\n+\n+ });\n \n- done();\n }\n \n }\n@@ -194,10 +198,7 @@ export class Tab extends NavController {\n \n @Directive({selector: 'template[content-anchor]'})\n class TabContentAnchor {\n- constructor(\n- @Host() tab: Tab,\n- viewContainerRef: ViewContainerRef\n- ) {\n- tab.contentContainerRef = viewContainerRef;\n+ constructor(@Host() tab: Tab, elementRef: ElementRef) {\n+ tab.contentAnchorRef = elementRef;\n }\n }\n", "diff --git a/package.json b/package.json\nindex c8051d2..b0a97fb 100644\n--- a/package.json\n+++ b/package.json\n@@ -60,6 +60,7 @@\n \"babel-cli\": \"^6.16.0\",\n \"babel-core\": \"^6.16.0\",\n \"babel-eslint\": \"^7.0.0\",\n+ \"babel-loader\": \"^6.2.5\",\n \"babel-plugin-transform-class-properties\": \"^6.10.2\",\n \"babel-plugin-transform-flow-strip-types\": \"^6.14.0\",\n \"babel-preset-es2015-node6\": \"^0.3.0\",\n@@ -82,6 +83,7 @@\n \"eslint-plugin-react\": \"^6.3.0\",\n \"flow-bin\": \"^0.33.0\",\n \"jsdom\": \"^9.4.2\",\n+ \"json-loader\": \"^0.5.4\",\n \"jsx-chai\": \"^4.0.0\",\n \"mocha\": \"^3.0.2\",\n \"mock-require\": \"^1.3.0\",\n@@ -91,6 +93,8 @@\n \"rimraf\": \"^2.5.2\",\n \"sinon\": \"^1.17.6\",\n \"sinon-chai\": \"^2.8.0\",\n- \"watch\": \"^1.0.0\"\n+ \"source-map-support\": \"^0.4.3\",\n+ \"watch\": \"^1.0.0\",\n+ \"webpack\": \"^1.13.2\"\n }\n }\ndiff --git a/webpack.config.js b/webpack.config.js\nnew file mode 100644\nindex 0000000..0ca6da1\n--- /dev/null\n+++ b/webpack.config.js\n@@ -0,0 +1,44 @@\n+const webpack = require('webpack');\n+const path = require('path');\n+const fs = require('fs');\n+\n+const nodeModules = {\n+ zmq: 'commonjs zmq',\n+ jmp: 'commonjs jmp',\n+ github: 'commonjs github',\n+};\n+\n+module.exports = {\n+ entry: './src/notebook/index.js',\n+ target: 'electron-renderer',\n+ output: {\n+ path: path.join(__dirname, 'app', 'build'),\n+ filename: 'webpacked-notebook.js'\n+ },\n+ module: {\n+ loaders: [\n+ { test: /\\.js$/, exclude: /node_modules/, loaders: ['babel'] },\n+ { test: /\\.json$/, loader: 'json-loader' },\n+ ]\n+ },\n+ resolve: {\n+ extensions: ['', '.js', '.jsx'],\n+ root: path.join(__dirname, 'app'),\n+ // Webpack 1\n+ modulesDirectories: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ path.resolve(__dirname, 'node_modules'),\n+ ],\n+ // Webpack 2\n+ modules: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ ],\n+ },\n+ externals: nodeModules,\n+ plugins: [\n+ new webpack.IgnorePlugin(/\\.(css|less)$/),\n+ new webpack.BannerPlugin('require(\"source-map-support\").install();',\n+ { raw: true, entryOnly: false })\n+ ],\n+ devtool: 'sourcemap'\n+};\n"]
5
["aa746b764e6c54bbbd631210fce35df842d09b12", "54cb6d4643b4a072ff997592a7fa14a69a6c068d", "e9233ae3f7811707945fc2de60971595d83c578d", "085ee958c48d695ba50822d8767d615fd9e887fa", "4ab28fc2e63e975a0c77e18ae644f34fa5f8771a"]
["test", "ci", "fix", "refactor", "build"]
add test for clickhouse-specific `create_table` parameters,use an action for issue assignment,update version (v0.6.18),methods for scanning headers,wire up fixed null encoding
["diff --git a/ibis/backends/clickhouse/tests/test_client.py b/ibis/backends/clickhouse/tests/test_client.py\nindex 678683d..c4e2aec 100644\n--- a/ibis/backends/clickhouse/tests/test_client.py\n+++ b/ibis/backends/clickhouse/tests/test_client.py\n@@ -224,6 +224,21 @@ def test_create_table_data(con, data, engine, temp_table):\n assert len(t.execute()) == 3\n \n \n+def test_create_table_with_properties(con, temp_table):\n+ data = pd.DataFrame({\"a\": list(\"abcde\" * 20), \"b\": [1, 2, 3, 4, 5] * 20})\n+ n = len(data)\n+ t = con.create_table(\n+ temp_table,\n+ data,\n+ schema=ibis.schema(dict(a=\"string\", b=\"!uint32\")),\n+ order_by=[\"a\", \"b\"],\n+ partition_by=[\"a\"],\n+ sample_by=[\"b\"],\n+ settings={\"allow_nullable_key\": \"1\"},\n+ )\n+ assert t.count().execute() == n\n+\n+\n @pytest.mark.parametrize(\n \"engine\",\n [\n", "diff --git a/.github/workflows/assign.yml b/.github/workflows/assign.yml\nindex 29d92a8..758874e 100644\n--- a/.github/workflows/assign.yml\n+++ b/.github/workflows/assign.yml\n@@ -8,8 +8,6 @@ jobs:\n runs-on: ubuntu-latest\n if: ${{ github.event.comment.body == '/take' }}\n steps:\n- - uses: actions/checkout@v2\n- - name: Assign issue ${{ github.event.issue.number }} to ${{ github.event.comment.user.login }}\n- run: gh issue edit ${{ github.event.issue.number }} --add-assignee \"${{ github.event.comment.user.login }}\"\n- env:\n- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n+ - uses: pozil/[email protected]\n+ with:\n+ assignees: ${{ github.event.comment.user.login }}\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex c32d8b4..599790e 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -125,7 +125,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -133,7 +133,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex baaa0ac..5082cd3 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n experimental = [\"erg_common/experimental\", \"erg_parser/experimental\", \"erg_compiler/experimental\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.30-nightly.2\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.18\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.18\", 
path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.18\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.30\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 3efbf4e..9f902fa 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n", "diff --git a/src/chainstate/stacks/db/headers.rs b/src/chainstate/stacks/db/headers.rs\nindex a12362d..91eb580 100644\n--- a/src/chainstate/stacks/db/headers.rs\n+++ b/src/chainstate/stacks/db/headers.rs\n@@ -31,8 +31,8 @@ use crate::core::FIRST_BURNCHAIN_CONSENSUS_HASH;\n use crate::core::FIRST_STACKS_BLOCK_HASH;\n use crate::util_lib::db::Error as db_error;\n use crate::util_lib::db::{\n- query_count, query_row, query_row_columns, query_row_panic, query_rows, DBConn, FromColumn,\n- FromRow,\n+ query_count, query_row, query_row_columns, query_row_panic, query_rows, u64_to_sql, DBConn,\n+ FromColumn, FromRow,\n };\n use clarity::vm::costs::ExecutionCost;\n \n@@ -360,4 +360,23 @@ impl StacksChainState {\n }\n Ok(ret)\n }\n+\n+ /// Get all headers at a given Stacks height\n+ pub fn get_all_headers_at_height(\n+ conn: &Connection,\n+ height: u64,\n+ ) -> Result<Vec<StacksHeaderInfo>, Error> {\n+ let qry =\n+ \"SELECT * FROM block_headers WHERE block_height = ?1 ORDER BY burn_header_height DESC\";\n+ let args: &[&dyn ToSql] = &[&u64_to_sql(height)?];\n+ query_rows(conn, qry, args).map_err(|e| e.into())\n+ }\n+\n+ /// Get the highest known header height\n+ pub fn get_max_header_height(conn: &Connection) -> Result<u64, Error> {\n+ let qry = \"SELECT block_height FROM block_headers ORDER BY block_height DESC LIMIT 1\";\n+ query_row(conn, qry, NO_PARAMS)\n+ .map(|row_opt: Option<i64>| row_opt.map(|h| h as u64).unwrap_or(0))\n+ .map_err(|e| e.into())\n+ }\n }\n", "diff --git a/read_buffer/src/row_group.rs b/read_buffer/src/row_group.rs\nindex 91c9fb5..ca77f3c 100644\n--- a/read_buffer/src/row_group.rs\n+++ b/read_buffer/src/row_group.rs\n@@ -958,24 +958,15 @@ impl From<RecordBatch> for RowGroup {\n }\n Some(InfluxColumnType::Field(_)) => {\n let column_data = match arrow_column.data_type() {\n- arrow::datatypes::DataType::Int64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::Int64Array>()\n- .unwrap(),\n- ),\n- arrow::datatypes::DataType::Float64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::Float64Array>()\n- .unwrap(),\n- ),\n- arrow::datatypes::DataType::UInt64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::UInt64Array>()\n- .unwrap(),\n- ),\n+ arrow::datatypes::DataType::Int64 => {\n+ Column::from(arrow::array::Int64Array::from(arrow_column.data()))\n+ }\n+ arrow::datatypes::DataType::Float64 => {\n+ Column::from(arrow::array::Float64Array::from(arrow_column.data()))\n+ }\n+ arrow::datatypes::DataType::UInt64 => {\n+ Column::from(arrow::array::UInt64Array::from(arrow_column.data()))\n+ }\n dt => unimplemented!(\n \"data type {:?} currently not supported for field columns\",\n dt\n"]
5
["7e1ece7d3fd41d1e3ee38e479c119494bb269966", "fb3a231b29bc8bff9270b99dd4aff9dad599f21f", "bb3e3d9b96e435c3b92fc208bca93d1ad7e1ad50", "6a63a9d439e18b6b8483abdf19162f476fcf8563", "28b596b8834d1b51be3ac6a2ac30df28f37702d8"]
["test", "ci", "build", "feat", "refactor"]
trigger build every hour for develop To better track stability of the develop branch the build should be triggered on commit and every hour. Other branches should not be affected. - add cron trigger to develop branch - extract variables to identify stable and develop branch,Introduce timediff fn (stub),fix test Write another record so the commit position is updated and we can take a snapshot,remove unnecessary lines from verify-wal test,change notice from 'danger' > 'info' Signed-off-by: Raju Udava <[email protected]>
["diff --git a/Jenkinsfile b/Jenkinsfile\nindex 2c58f61..9daa38f 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -4,9 +4,17 @@\n \n def buildName = \"${env.JOB_BASE_NAME.replaceAll(\"%2F\", \"-\").replaceAll(\"\\\\.\", \"-\").take(20)}-${env.BUILD_ID}\"\n \n+def masterBranchName = 'master'\n+def isMasterBranch = env.BRANCH_NAME == masterBranchName\n+def developBranchName = 'develop'\n+def isDevelopBranch = env.BRANCH_NAME == developBranchName\n+\n //for develop branch keep builds for 7 days to be able to analyse build errors, for all other branches, keep the last 10 builds\n-def daysToKeep = (env.BRANCH_NAME=='develop') ? '7' : '-1'\n-def numToKeep = (env.BRANCH_NAME=='develop') ? '-1' : '10'\n+def daysToKeep = isDevelopBranch ? '7' : '-1'\n+def numToKeep = isDevelopBranch ? '-1' : '10'\n+\n+//the develop branch should be run hourly to detect flaky tests and instability, other branches only on commit\n+def cronTrigger = isDevelopBranch ? '@hourly' : ''\n \n pipeline {\n agent {\n@@ -23,6 +31,10 @@ pipeline {\n SONARCLOUD_TOKEN = credentials('zeebe-sonarcloud-token')\n }\n \n+ triggers {\n+ cron(cronTrigger)\n+ }\n+\n options {\n buildDiscarder(logRotator(daysToKeepStr: daysToKeep, numToKeepStr: numToKeep))\n timestamps()\n@@ -201,7 +213,7 @@ pipeline {\n }\n \n stage('Upload') {\n- when { branch 'develop' }\n+ when { allOf { branch developBranchName ; not { triggeredBy 'TimerTrigger' } } }\n steps {\n retry(3) {\n container('maven') {\n@@ -214,9 +226,11 @@ pipeline {\n }\n \n stage('Post') {\n+ when { not { triggeredBy 'TimerTrigger' } }\n+\n parallel {\n stage('Docker') {\n- when { branch 'develop' }\n+ when { branch developBranchName }\n \n environment {\n VERSION = readMavenPom(file: 'parent/pom.xml').getVersion()\n@@ -227,20 +241,20 @@ pipeline {\n build job: 'zeebe-docker', parameters: [\n string(name: 'BRANCH', value: env.BRANCH_NAME),\n string(name: 'VERSION', value: env.VERSION),\n- booleanParam(name: 'IS_LATEST', value: env.BRANCH_NAME == 'master'),\n- booleanParam(name: 'PUSH', value: env.BRANCH_NAME == 'develop')\n+ booleanParam(name: 'IS_LATEST', value: isMasterBranch),\n+ booleanParam(name: 'PUSH', value: isDevelopBranch)\n ]\n }\n }\n }\n \n stage('Docs') {\n- when { anyOf { branch 'master'; branch 'develop' } }\n+ when { anyOf { branch masterBranchName; branch developBranchName } }\n steps {\n retry(3) {\n build job: 'zeebe-docs', parameters: [\n string(name: 'BRANCH', value: env.BRANCH_NAME),\n- booleanParam(name: 'LIVE', value: env.BRANCH_NAME == 'master')\n+ booleanParam(name: 'LIVE', value: isMasterBranch)\n ]\n }\n }\n", "diff --git a/rust/Cargo.lock b/rust/Cargo.lock\nindex b42616f..4795eb6 100644\n--- a/rust/Cargo.lock\n+++ b/rust/Cargo.lock\n@@ -1287,7 +1287,7 @@ dependencies = [\n [[package]]\n name = \"datafusion\"\n version = \"5.1.0\"\n-source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=8df4132b83d896a0d3db5c82a4eaaa3eaa285d15#8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\"\n+source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=868f3c4de13d13cda84cee33475b9782b94fa60c#868f3c4de13d13cda84cee33475b9782b94fa60c\"\n dependencies = [\n \"ahash 0.7.4\",\n \"arrow 6.0.0\",\ndiff --git a/rust/cubesql/Cargo.toml b/rust/cubesql/Cargo.toml\nindex 3cb386a..9aef494 100644\n--- a/rust/cubesql/Cargo.toml\n+++ b/rust/cubesql/Cargo.toml\n@@ -9,7 +9,7 @@ documentation = \"https://cube.dev/docs\"\n homepage = \"https://cube.dev\"\n \n [dependencies]\n-datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = 
\"8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\", default-features = false, features = [\"unicode_expressions\"] }\n+datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"868f3c4de13d13cda84cee33475b9782b94fa60c\", default-features = false, features = [\"unicode_expressions\"] }\n anyhow = \"1.0\"\n thiserror = \"1.0\"\n cubeclient = { path = \"../cubeclient\" }\ndiff --git a/rust/cubesql/src/compile/engine/df/intervals.rs b/rust/cubesql/src/compile/engine/df/intervals.rs\nnew file mode 100644\nindex 0000000..9e6cb7e\n--- /dev/null\n+++ b/rust/cubesql/src/compile/engine/df/intervals.rs\n@@ -0,0 +1,51 @@\n+#[macro_export]\n+macro_rules! make_string_interval_year_month {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let interval = $array.value($row) as f64;\n+ let years = (interval / 12_f64).floor();\n+ let month = interval - (years * 12_f64);\n+\n+ format!(\n+ \"{} years {} mons 0 days 0 hours 0 mins 0.00 secs\",\n+ years, month,\n+ )\n+ };\n+\n+ s\n+ }};\n+}\n+\n+#[macro_export]\n+macro_rules! make_string_interval_day_time {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let value: u64 = $array.value($row) as u64;\n+\n+ let days_parts: i32 = ((value & 0xFFFFFFFF00000000) >> 32) as i32;\n+ let milliseconds_part: i32 = (value & 0xFFFFFFFF) as i32;\n+\n+ let secs = milliseconds_part / 1000;\n+ let mins = secs / 60;\n+ let hours = mins / 60;\n+\n+ let secs = secs - (mins * 60);\n+ let mins = mins - (hours * 60);\n+\n+ format!(\n+ \"0 years 0 mons {} days {} hours {} mins {}.{:02} secs\",\n+ days_parts,\n+ hours,\n+ mins,\n+ secs,\n+ (milliseconds_part % 1000),\n+ )\n+ };\n+\n+ s\n+ }};\n+}\ndiff --git a/rust/cubesql/src/compile/engine/df/mod.rs b/rust/cubesql/src/compile/engine/df/mod.rs\nindex a19a970..3097523 100644\n--- a/rust/cubesql/src/compile/engine/df/mod.rs\n+++ b/rust/cubesql/src/compile/engine/df/mod.rs\n@@ -1 +1,2 @@\n pub mod coerce;\n+pub mod intervals;\ndiff --git a/rust/cubesql/src/compile/engine/udf.rs b/rust/cubesql/src/compile/engine/udf.rs\nindex 55b8bc1..0e160b3 100644\n--- a/rust/cubesql/src/compile/engine/udf.rs\n+++ b/rust/cubesql/src/compile/engine/udf.rs\n@@ -1,14 +1,19 @@\n use std::any::type_name;\n use std::sync::Arc;\n \n+\n use datafusion::{\n arrow::{\n array::{\n ArrayRef, BooleanArray, BooleanBuilder, GenericStringArray, Int32Builder,\n- PrimitiveArray, StringBuilder, UInt32Builder,\n+ IntervalDayTimeBuilder, PrimitiveArray, StringBuilder,\n+ UInt32Builder,\n },\n compute::cast,\n- datatypes::{DataType, Int64Type},\n+ datatypes::{\n+ DataType, Int64Type, IntervalUnit, TimeUnit,\n+ TimestampNanosecondType,\n+ },\n },\n error::DataFusionError,\n logical_plan::create_udf,\n@@ -399,3 +404,63 @@ pub fn create_convert_tz_udf() -> ScalarUDF {\n &fun,\n )\n }\n+\n+pub fn create_timediff_udf() -> ScalarUDF {\n+ let fun = make_scalar_function(move |args: &[ArrayRef]| {\n+ assert!(args.len() == 2);\n+\n+ let left_dt = &args[0];\n+ let right_dt = &args[1];\n+\n+ let left_date = match left_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(left_dt, \"left_dt\", TimestampNanosecondType);\n+ let ts = arr.value(0);\n+\n+ // NaiveDateTime::from_timestamp(ts, 0)\n+ ts\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"left_dt argument must be a Timestamp, actual: {}\",\n+ left_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let right_date = match 
right_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(right_dt, \"right_dt\", TimestampNanosecondType);\n+ arr.value(0)\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"right_dt argument must be a Timestamp, actual: {}\",\n+ right_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let diff = right_date - left_date;\n+ if diff != 0 {\n+ return Err(DataFusionError::NotImplemented(format!(\n+ \"timediff is not implemented, it's stub\"\n+ )));\n+ }\n+\n+ let mut interal_arr = IntervalDayTimeBuilder::new(1);\n+ interal_arr.append_value(diff)?;\n+\n+ Ok(Arc::new(interal_arr.finish()) as ArrayRef)\n+ });\n+\n+ let return_type: ReturnTypeFunction =\n+ Arc::new(move |_| Ok(Arc::new(DataType::Interval(IntervalUnit::DayTime))));\n+\n+ ScalarUDF::new(\n+ \"timediff\",\n+ &Signature::any(2, Volatility::Immutable),\n+ &return_type,\n+ &fun,\n+ )\n+}\ndiff --git a/rust/cubesql/src/compile/mod.rs b/rust/cubesql/src/compile/mod.rs\nindex a88da57..6121aa0 100644\n--- a/rust/cubesql/src/compile/mod.rs\n+++ b/rust/cubesql/src/compile/mod.rs\n@@ -32,8 +32,8 @@ use self::engine::context::SystemVar;\n use self::engine::provider::CubeContext;\n use self::engine::udf::{\n create_connection_id_udf, create_convert_tz_udf, create_current_user_udf, create_db_udf,\n- create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_user_udf,\n- create_version_udf,\n+ create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_timediff_udf,\n+ create_user_udf, create_version_udf,\n };\n use self::parser::parse_sql_to_statement;\n \n@@ -1450,6 +1450,7 @@ impl QueryPlanner {\n ctx.register_udf(create_if_udf());\n ctx.register_udf(create_least_udf());\n ctx.register_udf(create_convert_tz_udf());\n+ ctx.register_udf(create_timediff_udf());\n \n let state = ctx.state.lock().unwrap().clone();\n let cube_ctx = CubeContext::new(&state, &self.context.cubes);\n@@ -3226,6 +3227,25 @@ mod tests {\n }\n \n #[tokio::test]\n+ async fn test_timediff() -> Result<(), CubeError> {\n+ assert_eq!(\n+ execute_df_query(\n+ \"select \\\n+ timediff('1994-11-26T13:25:00.000Z'::timestamp, '1994-11-26T13:25:00.000Z'::timestamp) as r1\n+ \".to_string()\n+ )\n+ .await?,\n+ \"+------------------------------------------------+\\n\\\n+ | r1 |\\n\\\n+ +------------------------------------------------+\\n\\\n+ | 0 years 0 mons 0 days 0 hours 0 mins 0.00 secs |\\n\\\n+ +------------------------------------------------+\"\n+ );\n+\n+ Ok(())\n+ }\n+\n+ #[tokio::test]\n async fn test_metabase() -> Result<(), CubeError> {\n assert_eq!(\n execute_df_query(\ndiff --git a/rust/cubesql/src/mysql/dataframe.rs b/rust/cubesql/src/mysql/dataframe.rs\nindex fa246aa..2443458 100644\n--- a/rust/cubesql/src/mysql/dataframe.rs\n+++ b/rust/cubesql/src/mysql/dataframe.rs\n@@ -3,9 +3,10 @@ use std::fmt::{self, Debug, Formatter};\n use chrono::{SecondsFormat, TimeZone, Utc};\n use comfy_table::{Cell, Table};\n use datafusion::arrow::array::{\n- Array, Float64Array, Int32Array, Int64Array, StringArray, TimestampMicrosecondArray,\n- UInt32Array,\n+ Array, Float64Array, Int32Array, Int64Array, IntervalDayTimeArray, IntervalYearMonthArray,\n+ StringArray, TimestampMicrosecondArray, UInt32Array,\n };\n+use datafusion::arrow::datatypes::IntervalUnit;\n use datafusion::arrow::{\n array::{BooleanArray, TimestampNanosecondArray, UInt64Array},\n datatypes::{DataType, TimeUnit},\n@@ -15,6 +16,7 @@ use log::{error, warn};\n use msql_srv::{ColumnFlags, ColumnType};\n \n use 
crate::{compile::builder::CompiledQueryFieldMeta, CubeError};\n+use crate::{make_string_interval_day_time, make_string_interval_year_month};\n \n #[derive(Clone, Debug)]\n pub struct Column {\n@@ -309,6 +311,7 @@ pub fn arrow_to_column_type(arrow_type: DataType) -> Result<ColumnType, CubeErro\n DataType::Binary => Ok(ColumnType::MYSQL_TYPE_BLOB),\n DataType::Utf8 | DataType::LargeUtf8 => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Timestamp(_, _) => Ok(ColumnType::MYSQL_TYPE_STRING),\n+ DataType::Interval(_) => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Float16 | DataType::Float64 => Ok(ColumnType::MYSQL_TYPE_DOUBLE),\n DataType::Boolean => Ok(ColumnType::MYSQL_TYPE_TINY),\n DataType::Int8\n@@ -402,6 +405,24 @@ pub fn batch_to_dataframe(batches: &Vec<RecordBatch>) -> Result<DataFrame, CubeE\n });\n }\n }\n+ DataType::Interval(IntervalUnit::DayTime) => {\n+ let a = array\n+ .as_any()\n+ .downcast_ref::<IntervalDayTimeArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_day_time!(a, i)));\n+ }\n+ }\n+ DataType::Interval(IntervalUnit::YearMonth) => {\n+ let a = array\n+ .as_any()\n+ .downcast_ref::<IntervalYearMonthArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_year_month!(a, i)));\n+ }\n+ }\n DataType::Boolean => {\n let a = array.as_any().downcast_ref::<BooleanArray>().unwrap();\n for i in 0..num_rows {\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\nindex 24f1316..881c727 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n@@ -70,6 +70,14 @@ public class ReaderCloseTest {\n .getCluster()\n .getNodeId();\n clusteringRule.forceClusterToHaveNewLeader(followerId);\n+ // because of https://github.com/camunda-cloud/zeebe/issues/8329\n+ // we need to add another record so we can do a snapshot\n+ clientRule\n+ .getClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"test\")\n+ .correlationKey(\"test\")\n+ .send();\n \n // when\n clusteringRule.triggerAndWaitForSnapshots();\n@@ -78,6 +86,7 @@ public class ReaderCloseTest {\n for (final Broker broker : clusteringRule.getBrokers()) {\n assertThatFilesOfDeletedSegmentsDoesNotExist(broker);\n }\n+ assertThat(leaderId).isNotEqualTo(clusteringRule.getLeaderForPartition(1).getNodeId());\n }\n \n private void assertThatFilesOfDeletedSegmentsDoesNotExist(final Broker leader)\n", "diff --git a/storage/wal/verifier_test.go b/storage/wal/verifier_test.go\nindex 61e1536..a44755f 100644\n--- a/storage/wal/verifier_test.go\n+++ b/storage/wal/verifier_test.go\n@@ -138,22 +138,13 @@ func writeCorruptEntries(file *os.File, t *testing.T, n int) {\n \t\t}\n \t}\n \n-\n \t// Write some random bytes to the file to simulate corruption.\n \tif _, err := file.Write(corruption); err != nil {\n \t\tfatal(t, \"corrupt WAL segment\", err)\n \t}\n-\tcorrupt := []byte{1, 255, 0, 3, 45, 26, 110}\n-\n-\twrote, err := file.Write(corrupt)\n-\tif err != nil {\n-\t\tt.Fatal(err)\n-\t} else if wrote != len(corrupt) {\n-\t\tt.Fatal(\"Error writing corrupt data to file\")\n-\t}\n \n \tif err := file.Close(); err != nil {\n-\t\tt.Fatalf(\"Error: filed to close file: %v\\n\", err)\n+\t\tt.Fatalf(\"Error: failed to close file: %v\\n\", err)\n \t}\n }\n \n", "diff --git 
a/packages/noco-docs/docs/030.workspaces/040.actions-on-workspace.md b/packages/noco-docs/docs/030.workspaces/040.actions-on-workspace.md\nindex 17a1d85..b8c3f52 100644\n--- a/packages/noco-docs/docs/030.workspaces/040.actions-on-workspace.md\n+++ b/packages/noco-docs/docs/030.workspaces/040.actions-on-workspace.md\n@@ -20,7 +20,7 @@ To update the workspace name:\n ## Delete workspace\n If you determine that a workspace is no longer necessary, you have the option to permanently remove it from your settings. Deleting a workspace will delete all the bases and data associated with it.\n \n-:::danger\n+:::info\n **This action cannot be undone.**\n :::\n \ndiff --git a/packages/noco-docs/docs/040.bases/070.actions-on-base.md b/packages/noco-docs/docs/040.bases/070.actions-on-base.md\nindex b8e5723..7207971 100644\n--- a/packages/noco-docs/docs/040.bases/070.actions-on-base.md\n+++ b/packages/noco-docs/docs/040.bases/070.actions-on-base.md\n@@ -69,7 +69,7 @@ To duplicate a base, you can follow these straightforward steps:\n \n If you determine that a base is no longer necessary, you have the option to permanently remove it from your workspace. Deleting a base will delete all the tables and data associated with it.\n \n-:::danger\n+:::info\n **This action cannot be undone.**\n :::\n \ndiff --git a/packages/noco-docs/docs/050.tables/060.actions-on-table.md b/packages/noco-docs/docs/050.tables/060.actions-on-table.md\nindex 3cf03d3..8ae9ade 100644\n--- a/packages/noco-docs/docs/050.tables/060.actions-on-table.md\n+++ b/packages/noco-docs/docs/050.tables/060.actions-on-table.md\n@@ -46,7 +46,7 @@ A new table will be generated, mirroring the original table's schema and content\n \n ## Delete table\n \n-:::danger\n+:::info\n **This action cannot be undone.**\n :::\n \ndiff --git a/packages/noco-docs/docs/070.fields/060.actions-on-field.md b/packages/noco-docs/docs/070.fields/060.actions-on-field.md\nindex 600c6fd..fe2cfa8 100644\n--- a/packages/noco-docs/docs/070.fields/060.actions-on-field.md\n+++ b/packages/noco-docs/docs/070.fields/060.actions-on-field.md\n@@ -83,7 +83,7 @@ New field will be created to the right of the original field.\n New field will be created to the left of the original field.\n \n ### Delete field\n-:::danger\n+:::info\n **This action cannot be undone.**\n :::\n \ndiff --git a/packages/noco-docs/docs/080.records/070.actions-on-record.md b/packages/noco-docs/docs/080.records/070.actions-on-record.md\nindex a9245ff..6d4774a 100644\n--- a/packages/noco-docs/docs/080.records/070.actions-on-record.md\n+++ b/packages/noco-docs/docs/080.records/070.actions-on-record.md\n@@ -54,8 +54,8 @@ On the bulk update modal,\n 5. Click on the `Bulk Update all` button\n 6. A confirmation dialog will be displayed. Click on `Confirm` to update the records.\n \n-:::danger\n-This operation cannot be undone.\n+:::info\n+**This action cannot be undone.**\n :::\n \n ![Bulk Update](/img/v2/records/bulk-update-1.png)\ndiff --git a/packages/noco-docs/docs/090.views/090.actions-on-view.md b/packages/noco-docs/docs/090.views/090.actions-on-view.md\nindex c6c6ab2..7d23959 100644\n--- a/packages/noco-docs/docs/090.views/090.actions-on-view.md\n+++ b/packages/noco-docs/docs/090.views/090.actions-on-view.md\n@@ -41,7 +41,7 @@ The view context menu provides a set of tools to interact with the view. The vie\n \n ## Delete view\n \n-:::danger\n+:::info\n **This action cannot be undone.**\n :::\n \n"]
5
["3bc1541d6c95ef8cb5ce5da741733f09c98e4b29", "29dfb9716298c5a579c0ffba6742e13a29325670", "47df74d40becf915a9d89cdb887abd259b77def0", "fba4326c72fc22d81aba6976a9fef1e4b6154fd9", "2ba752d45350a676babe553dd68f019af81b512b"]
["ci", "feat", "test", "refactor", "docs"]
use module path alias,fix node test cases that run into an infinite loop,updated riot to v6, fixed build,abort parallel stages if one failed,add instruction for finding version
["diff --git a/src/background/audio-manager.ts b/src/background/audio-manager.ts\nindex 54e8b24..11c5fba 100644\n--- a/src/background/audio-manager.ts\n+++ b/src/background/audio-manager.ts\n@@ -2,7 +2,7 @@\n * To make sure only one audio plays at a time\n */\n \n-import { timeout } from '../_helpers/promise-more'\n+import { timeout } from '@/_helpers/promise-more'\n \n declare global {\n interface Window {\ndiff --git a/src/background/context-menus.ts b/src/background/context-menus.ts\nindex 994b59e..7036362 100644\n--- a/src/background/context-menus.ts\n+++ b/src/background/context-menus.ts\n@@ -1,5 +1,5 @@\n-import { storage, openURL } from '../_helpers/browser-api'\n-import { AppConfig } from '../app-config'\n+import { storage, openURL } from '@/_helpers/browser-api'\n+import { AppConfig } from '@/app-config'\n \n import { Observable } from 'rxjs/Observable'\n import { fromPromise } from 'rxjs/observable/fromPromise'\ndiff --git a/src/background/initialization.ts b/src/background/initialization.ts\nindex 0e5b3ad..001ee73 100644\n--- a/src/background/initialization.ts\n+++ b/src/background/initialization.ts\n@@ -1,6 +1,6 @@\n-import { storage, openURL } from '../_helpers/browser-api'\n-import checkUpdate from '../_helpers/check-update'\n-import { AppConfig } from '../app-config'\n+import { storage, openURL } from '@/_helpers/browser-api'\n+import checkUpdate from '@/_helpers/check-update'\n+import { AppConfig } from '@/app-config'\n import { mergeConfig } from './merge-config'\n import { init as initMenus } from './context-menus'\n import { init as initPdf } from './pdf-sniffer'\ndiff --git a/src/background/merge-config.ts b/src/background/merge-config.ts\nindex afa1800..afdbd63 100644\n--- a/src/background/merge-config.ts\n+++ b/src/background/merge-config.ts\n@@ -1,4 +1,4 @@\n-import { appConfigFactory, AppConfig } from '../app-config'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n import _ from 'lodash'\n \n /**\n@@ -24,7 +24,7 @@ function initConfig (): Promise<AppConfig> {\n const storageObj = { config: appConfigFactory() }\n \n Object.keys(storageObj.config.dicts.all).forEach(id => {\n- storageObj[id] = require('../components/dictionaries/' + id + '/config')\n+ storageObj[id] = require('@/components/dictionaries/' + id + '/config')\n })\n \n return browser.storage.sync.set(storageObj)\n@@ -70,7 +70,7 @@ function mergeHistorical (config): Promise<AppConfig> {\n \n const storageObj = { config: base }\n Object.keys(base.dicts.all).forEach(id => {\n- storageObj[id] = config.dicts.all[id] || require('../components/dictionaries/' + id + '/config')\n+ storageObj[id] = config.dicts.all[id] || require('@/components/dictionaries/' + id + '/config')\n })\n \n return browser.storage.sync.set(storageObj)\ndiff --git a/src/background/pdf-sniffer.ts b/src/background/pdf-sniffer.ts\nindex 6ba27cf..70aa38f 100644\n--- a/src/background/pdf-sniffer.ts\n+++ b/src/background/pdf-sniffer.ts\n@@ -2,8 +2,8 @@\n * Open pdf link directly\n */\n \n-import { storage } from '../_helpers/browser-api'\n-import { AppConfig } from '../app-config'\n+import { storage } from '@/_helpers/browser-api'\n+import { AppConfig } from '@/app-config'\n \n export function init (pdfSniff: boolean) {\n if (browser.webRequest.onBeforeRequest.hasListener(otherPdfListener)) {\ndiff --git a/src/background/server.ts b/src/background/server.ts\nindex 73b34b6..66ed5c0 100644\n--- a/src/background/server.ts\n+++ b/src/background/server.ts\n@@ -1,7 +1,7 @@\n-import { DictID } from '../app-config'\n-import { message, 
openURL } from '../_helpers/browser-api'\n+import { DictID } from '@/app-config'\n+import { message, openURL } from '@/_helpers/browser-api'\n import { play } from './audio-manager'\n-import { chsToChz } from '../_helpers/chs-to-chz'\n+import { chsToChz } from '@/_helpers/chs-to-chz'\n \n interface MessageOpenUrlWithEscape {\n type: 'OPEN_URL'\n@@ -63,7 +63,7 @@ function fetchDictResult (data: MessageFetchDictResult): Promise<void> {\n let search\n \n try {\n- search = require('../components/dictionaries/' + data.dict + '/engine.js')\n+ search = require('@/components/dictionaries/' + data.dict + '/engine.js')\n } catch (err) {\n return Promise.reject(err)\n }\ndiff --git a/test/unit/_helpers/browser-api.spec.ts b/test/unit/_helpers/browser-api.spec.ts\nindex 1f39145..e327169 100644\n--- a/test/unit/_helpers/browser-api.spec.ts\n+++ b/test/unit/_helpers/browser-api.spec.ts\n@@ -1,4 +1,4 @@\n-import { message, storage, openURL } from '../../../src/_helpers/browser-api'\n+import { message, storage, openURL } from '@/_helpers/browser-api'\n \n beforeEach(() => {\n browser.flush()\ndiff --git a/test/unit/_helpers/check-update.spec.ts b/test/unit/_helpers/check-update.spec.ts\nindex 2abfc57..fd0b678 100644\n--- a/test/unit/_helpers/check-update.spec.ts\n+++ b/test/unit/_helpers/check-update.spec.ts\n@@ -1,4 +1,4 @@\n-import checkUpdate from '../../../src/_helpers/check-update'\n+import checkUpdate from '@/_helpers/check-update'\n import fetchMock from 'jest-fetch-mock'\n \n describe('Check Update', () => {\ndiff --git a/test/unit/_helpers/chs-to-chz.spec.ts b/test/unit/_helpers/chs-to-chz.spec.ts\nindex 295c6ad..21d5229 100644\n--- a/test/unit/_helpers/chs-to-chz.spec.ts\n+++ b/test/unit/_helpers/chs-to-chz.spec.ts\n@@ -1,4 +1,4 @@\n-import chsToChz from '../../../src/_helpers/chs-to-chz'\n+import chsToChz from '@/_helpers/chs-to-chz'\n \n describe('Chs to Chz', () => {\n it('should convert chs to chz', () => {\ndiff --git a/test/unit/_helpers/fetch-dom.spec.ts b/test/unit/_helpers/fetch-dom.spec.ts\nindex a79dda0..bbfbf10 100644\n--- a/test/unit/_helpers/fetch-dom.spec.ts\n+++ b/test/unit/_helpers/fetch-dom.spec.ts\n@@ -1,4 +1,4 @@\n-import fetchDom from '../../../src/_helpers/fetch-dom'\n+import fetchDom from '@/_helpers/fetch-dom'\n \n class XMLHttpRequestMock {\n static queue: XMLHttpRequestMock[] = []\ndiff --git a/test/unit/_helpers/lang-check.spec.ts b/test/unit/_helpers/lang-check.spec.ts\nindex f3e668a..09f30bb 100644\n--- a/test/unit/_helpers/lang-check.spec.ts\n+++ b/test/unit/_helpers/lang-check.spec.ts\n@@ -1,4 +1,4 @@\n-import { isContainChinese, isContainEnglish } from '../../../src/_helpers/lang-check'\n+import { isContainChinese, isContainEnglish } from '@/_helpers/lang-check'\n \n describe('Language Check', () => {\n it('isContainChinese should return ture if text contains Chinese', () => {\ndiff --git a/test/unit/_helpers/promise-more.spec.ts b/test/unit/_helpers/promise-more.spec.ts\nindex 9601c7d..66dc8d9 100644\n--- a/test/unit/_helpers/promise-more.spec.ts\n+++ b/test/unit/_helpers/promise-more.spec.ts\n@@ -1,4 +1,4 @@\n-import * as pm from '../../../src/_helpers/promise-more'\n+import * as pm from '@/_helpers/promise-more'\n \n describe('Promise More', () => {\n beforeAll(() => {\ndiff --git a/test/unit/_helpers/selection.spec.ts b/test/unit/_helpers/selection.spec.ts\nindex 370239a..06812cf 100644\n--- a/test/unit/_helpers/selection.spec.ts\n+++ b/test/unit/_helpers/selection.spec.ts\n@@ -1,4 +1,4 @@\n-import selection from '../../../src/_helpers/selection'\n+import 
selection from '@/_helpers/selection'\n \n describe('Selection', () => {\n const bakSelection = window.getSelection\ndiff --git a/test/unit/_helpers/strip-script.spec.ts b/test/unit/_helpers/strip-script.spec.ts\nindex cce558f..355b382 100644\n--- a/test/unit/_helpers/strip-script.spec.ts\n+++ b/test/unit/_helpers/strip-script.spec.ts\n@@ -1,4 +1,4 @@\n-import stripScript from '../../../src/_helpers/strip-script'\n+import stripScript from '@/_helpers/strip-script'\n \n describe('Strip Script', () => {\n const expectedEl = document.createElement('div') as HTMLDivElement\ndiff --git a/test/unit/background/audio-manager.spec.ts b/test/unit/background/audio-manager.spec.ts\nindex b0096a6..b1266d7 100644\n--- a/test/unit/background/audio-manager.spec.ts\n+++ b/test/unit/background/audio-manager.spec.ts\n@@ -1,4 +1,4 @@\n-import audio from '../../../src/background/audio-manager'\n+import audio from '@/background/audio-manager'\n \n describe('Audio Manager', () => {\n const bakAudio = (window as any).Audio\ndiff --git a/test/unit/background/context-menus.spec.ts b/test/unit/background/context-menus.spec.ts\nindex 39e249c..d9049dc 100644\n--- a/test/unit/background/context-menus.spec.ts\n+++ b/test/unit/background/context-menus.spec.ts\n@@ -1,4 +1,4 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n import sinon from 'sinon'\n \n function specialConfig () {\n@@ -11,7 +11,7 @@ describe('Context Menus', () => {\n beforeAll(() => {\n browser.flush()\n jest.resetModules()\n- require('../../../src/background/context-menus')\n+ require('@/background/context-menus')\n })\n afterAll(() => browser.flush())\n \n@@ -93,7 +93,7 @@ describe('Context Menus', () => {\n browser.contextMenus.create.callsFake((_, cb) => cb())\n config = specialConfig()\n jest.resetModules()\n- const { init } = require('../../../src/background/context-menus')\n+ const { init } = require('@/background/context-menus')\n init(config.contextMenus)\n })\n \n@@ -110,7 +110,7 @@ describe('Context Menus', () => {\n it('should not init setup when called multiple times', () => {\n expect(browser.contextMenus.removeAll.calledOnce).toBeTruthy()\n \n- const { init } = require('../../../src/background/context-menus')\n+ const { init } = require('@/background/context-menus')\n init(config.contextMenus)\n init(config.contextMenus)\n \ndiff --git a/test/unit/background/initialization.spec.ts b/test/unit/background/initialization.spec.ts\nindex 7bc0972..56a6389 100644\n--- a/test/unit/background/initialization.spec.ts\n+++ b/test/unit/background/initialization.spec.ts\n@@ -1,4 +1,4 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n import fetchMock from 'jest-fetch-mock'\n import sinon from 'sinon'\n \n@@ -11,12 +11,12 @@ describe('Initialization', () => {\n const checkUpdate = jest.fn().mockReturnValue(Promise.resolve())\n \n beforeAll(() => {\n- const { message, storage } = require('../../../src/_helpers/browser-api')\n+ const { message, storage } = require('@/_helpers/browser-api')\n window.fetch = fetchMock\n \n browser.flush()\n jest.resetModules()\n- jest.doMock('../../../src/background/merge-config', () => {\n+ jest.doMock('@/background/merge-config', () => {\n return {\n mergeConfig (config) {\n mergeConfig(config)\n@@ -24,16 +24,16 @@ describe('Initialization', () => {\n }\n }\n })\n- jest.doMock('../../../src/background/context-menus', () => {\n+ 
jest.doMock('@/background/context-menus', () => {\n return { init: initMenus }\n })\n- jest.doMock('../../../src/background/pdf-sniffer', () => {\n+ jest.doMock('@/background/pdf-sniffer', () => {\n return { init: initPdf }\n })\n- jest.doMock('../../../src/_helpers/check-update', () => {\n+ jest.doMock('@/_helpers/check-update', () => {\n return checkUpdate\n })\n- jest.doMock('../../../src/_helpers/browser-api', () => {\n+ jest.doMock('@/_helpers/browser-api', () => {\n return {\n message,\n storage,\n@@ -41,13 +41,13 @@ describe('Initialization', () => {\n }\n })\n \n- require('../../../src/background/initialization')\n+ require('@/background/initialization')\n })\n afterAll(() => {\n browser.flush()\n- jest.dontMock('../../../src/background/merge-config')\n- jest.dontMock('../../../src/background/context-menus')\n- jest.dontMock('../../../src/_helpers/browser-api')\n+ jest.dontMock('@/background/merge-config')\n+ jest.dontMock('@/background/context-menus')\n+ jest.dontMock('@/_helpers/browser-api')\n window.fetch = bakFetch\n })\n \ndiff --git a/test/unit/background/merge-config.spec.ts b/test/unit/background/merge-config.spec.ts\nindex 73c047d..c0dce26 100644\n--- a/test/unit/background/merge-config.spec.ts\n+++ b/test/unit/background/merge-config.spec.ts\n@@ -1,5 +1,5 @@\n-import { appConfigFactory, AppConfig, AppConfigMutable } from '../../../src/app-config'\n-import mergeConfig from '../../../src/background/merge-config'\n+import { appConfigFactory, AppConfig, AppConfigMutable } from '@/app-config'\n+import mergeConfig from '@/background/merge-config'\n import sinon from 'sinon'\n \n describe('Merge Config', () => {\ndiff --git a/test/unit/background/pdf-sniffer.spec.ts b/test/unit/background/pdf-sniffer.spec.ts\nindex a0219d2..bb7726f 100644\n--- a/test/unit/background/pdf-sniffer.spec.ts\n+++ b/test/unit/background/pdf-sniffer.spec.ts\n@@ -1,5 +1,5 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n-import { init as initPdf } from '../../../src/background/pdf-sniffer'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n+import { init as initPdf } from '@/background/pdf-sniffer'\n import sinon from 'sinon'\n \n function hasListenerPatch (fn) {\ndiff --git a/test/unit/background/server.spec.ts b/test/unit/background/server.spec.ts\nindex b8ef065..aa04525 100644\n--- a/test/unit/background/server.spec.ts\n+++ b/test/unit/background/server.spec.ts\n@@ -1,5 +1,5 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n-import * as browserWrap from '../../../src/_helpers/browser-api'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n+import * as browserWrap from '@/_helpers/browser-api'\n import sinon from 'sinon'\n \n describe('Server', () => {\n@@ -13,26 +13,26 @@ describe('Server', () => {\n browserWrap.openURL = openURL\n \n beforeAll(() => {\n- jest.doMock('../../../src/_helpers/chs-to-chz', () => {\n+ jest.doMock('@/_helpers/chs-to-chz', () => {\n return { chsToChz }\n })\n- jest.doMock('../../../src/background/audio-manager', () => {\n+ jest.doMock('@/background/audio-manager', () => {\n return { play }\n })\n- jest.doMock('../../../src/_helpers/browser-api', () => {\n+ jest.doMock('@/_helpers/browser-api', () => {\n return browserWrap\n })\n- jest.doMock('../../../src/components/dictionaries/bing/engine.js', () => {\n+ jest.doMock('@/components/dictionaries/bing/engine.js', () => {\n return bingSearch\n })\n })\n \n afterAll(() => {\n browser.flush()\n- jest.dontMock('../../../src/_helpers/chs-to-chz')\n- 
jest.dontMock('../../../src/background/audio-manager')\n- jest.dontMock('../../../src/_helpers/browser-api')\n- jest.dontMock('../../../src/components/dictionaries/bing/engine.js')\n+ jest.dontMock('@/_helpers/chs-to-chz')\n+ jest.dontMock('@/background/audio-manager')\n+ jest.dontMock('@/_helpers/browser-api')\n+ jest.dontMock('@/components/dictionaries/bing/engine.js')\n })\n \n beforeEach(() => {\n@@ -46,7 +46,7 @@ describe('Server', () => {\n bingSearch.mockReset()\n bingSearch.mockImplementation(() => Promise.resolve())\n jest.resetModules()\n- require('../../../src/background/server')\n+ require('@/background/server')\n })\n \n it('should properly init', () => {\n", "diff --git a/packages/designer/tests/document/node/node.test.ts b/packages/designer/tests/document/node/node.test.ts\nindex dd20bd3..113360d 100644\n--- a/packages/designer/tests/document/node/node.test.ts\n+++ b/packages/designer/tests/document/node/node.test.ts\n@@ -26,7 +26,7 @@ import rootHeaderMetadata from '../../fixtures/component-metadata/root-header';\n import rootContentMetadata from '../../fixtures/component-metadata/root-content';\n import rootFooterMetadata from '../../fixtures/component-metadata/root-footer';\n \n-describe.skip('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n+describe('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n let editor: Editor;\n let designer: Designer;\n let project: Project;\n@@ -474,15 +474,16 @@ describe.skip('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n it('didDropIn / didDropOut', () => {\n const form = doc.getNode('node_k1ow3cbo');\n designer.createComponentMeta(divMetadata);\n+ designer.createComponentMeta(formMetadata);\n const callbacks = form.componentMeta.getMetadata().configure.advanced?.callbacks;\n const fn1 = callbacks.onNodeAdd = jest.fn();\n const fn2 = callbacks.onNodeRemove = jest.fn();\n const textField = doc.getNode('node_k1ow3cc9');\n form.didDropIn(textField);\n- expect(fn1).toHaveBeenCalledWith(textField, form);\n+ expect(fn1).toHaveBeenCalledWith(textField.internalToShellNode(), form.internalToShellNode());\n \n form.didDropOut(textField);\n- expect(fn2).toHaveBeenCalledWith(textField, form);\n+ expect(fn2).toHaveBeenCalledWith(textField.internalToShellNode(), form.internalToShellNode());\n });\n \n it('hover', () => {\n", "diff --git a/components/riot/package.json b/components/riot/package.json\nindex c41743a..eb69756 100644\n--- a/components/riot/package.json\n+++ b/components/riot/package.json\n@@ -61,7 +61,7 @@\n },\n \"devDependencies\": {\n \"@babel/preset-typescript\": \"^7.14.5\",\n- \"@riotjs/cli\": \"^6.0.4\",\n+ \"@riotjs/cli\": \"^6.0.5\",\n \"@riotjs/compiler\": \"^6.0.0\",\n \"chai\": \"^4.3.4\",\n \"esm\": \"^3.2.25\",\n", "diff --git a/Jenkinsfile b/Jenkinsfile\nindex 168f446..a4da961 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -28,6 +28,7 @@ pipeline {\n }\n \n stage('Verify') {\n+ failFast true\n parallel {\n stage('Tests') {\n steps {\n", "diff --git a/.github/ISSUE_TEMPLATE/_bug_report_chs.md b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\nindex 42a2e0f..44a33db 100644\n--- a/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n+++ b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n@@ -36,7 +36,7 @@ assignees: ''\n ## \u8bbe\u5907\u4fe1\u606f\n - \u64cd\u4f5c\u7cfb\u7edf: [] <!-- \u5982 [Window10] -->\n - \u6d4f\u89c8\u5668\u7248\u672c: [] <!-- \u5982 [Chrome77] -->\n-- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] -->\n+- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] \uff08\u5728\u6269\u5c55\u7ba1\u7406\u9875\u9762\u67e5\u770b\uff09 
-->\n \n <!-- \u8bf7\u5728\u4e0b\u65b9 ## \u5f00\u5934\u884c\u4e4b\u95f4\u7684\u7a7a\u767d\u5904\u586b\u5199 -->\n \n"]
5
["8246d024f21d93cc092e19bede5f7b3a5325c8dc", "d2c3f0ba6f85b659b76636a91ea9ab2b5a95a720", "5d256f937f93e5a5ed003df86d38c44834095a11", "28e623b294816c4e070971782a75c8697a11966f", "af0a5f7ab9d71fe20aa0888f682368f32b26fe18"]
["refactor", "test", "build", "ci", "docs"]
add page balckwhitelist and pdf,ecma 7 ready,fix deploy,add test for spurious cross join,export order
["diff --git a/src/_locales/common/messages.json b/src/_locales/common/messages.json\nindex e8524ac..3a596d6 100644\n--- a/src/_locales/common/messages.json\n+++ b/src/_locales/common/messages.json\n@@ -4,11 +4,21 @@\n \"zh_CN\": \"\u6dfb\u52a0\",\n \"zh_TW\": \"\u65b0\u589e\"\n },\n+ \"blacklist\": {\n+ \"en\": \"Blacklist\",\n+ \"zh_CN\": \"\u9ed1\u540d\u5355\",\n+ \"zh_TW\": \"\u9ed1\u540d\u55ae\"\n+ },\n \"cancel\": {\n \"en\": \"Cancel\",\n \"zh_CN\": \"\u53d6\u6d88\",\n \"zh_TW\": \"\u53d6\u6d88\"\n },\n+ \"changes_confirm\": {\n+ \"en\": \"Changes not saved. Close anyway?\",\n+ \"zh_CN\": \"\u4fee\u6539\u672a\u4fdd\u5b58\u3002\u786e\u8ba4\u5173\u95ed\uff1f\",\n+ \"zh_TW\": \"\u4fee\u6539\u672a\u4fdd\u5b58\u3002\u78ba\u5b9a\u95dc\u9589\uff1f\"\n+ },\n \"confirm\": {\n \"en\": \"Confirm\",\n \"zh_CN\": \"\u786e\u8ba4\",\n@@ -93,5 +103,10 @@\n \"en\": \"words\",\n \"zh_CN\": \"\u4e2a\",\n \"zh_TW\": \"\u4e2a\"\n+ },\n+ \"whitelist\": {\n+ \"en\": \"Whitelist\",\n+ \"zh_CN\": \"\u767d\u540d\u5355\",\n+ \"zh_TW\": \"\u767d\u540d\u55ae\"\n }\n }\ndiff --git a/src/_locales/options/messages.json b/src/_locales/options/messages.json\nindex ada2488..e7d699a 100644\n--- a/src/_locales/options/messages.json\n+++ b/src/_locales/options/messages.json\n@@ -119,6 +119,11 @@\n \"zh_CN\": \"\u53cd\u9988\u95ee\u9898\",\n \"zh_TW\": \"\u8edf\u9ad4\u4f7f\u7528\u7591\u554f\u548c\u5efa\u8a00\"\n },\n+ \"match_pattern_description\": {\n+ \"en\": \"Specify URLs as match patterns. <a href=\\\"https://developer.mozilla.org/en-US/Add-ons/WebExtensions/Match_patterns#Examples\\\" target=\\\"_blank\\\">Examples</a>. Empty fields will be removed.\",\n+ \"zh_CN\": \"\u7f51\u5740\u652f\u6301\u5339\u914d\u6a21\u5f0f\uff08<a href=\\\"https://developer.mozilla.org/zh-CN/Add-ons/WebExtensions/Match_patterns#\u8303\u4f8b\\\" target=\\\"_blank\\\">\u4f8b\u5b50</a>\uff09\u3002\u7559\u7a7a\u4fdd\u5b58\u5373\u53ef\u6e05\u9664\u3002\",\n+ \"zh_TW\": \"\u7db2\u5740\u652f\u63f4\u5339\u914d\u6a21\u5f0f\uff08<a href=\\\"https://developer.mozilla.org/zh-CN/Add-ons/WebExtensions/Match_patterns#\u8303\u4f8b\\\" target=\\\"_blank\\\">\u4f8b\u5b50</a>\uff09\u3002\u7559\u7a7a\u5132\u5b58\u5373\u53ef\u6e05\u9664\u3002\"\n+ },\n \"msg_updated\": {\n \"en\": \"Successfully updated\",\n \"zh_CN\": \"\u8bbe\u7f6e\u5df2\u66f4\u65b0\",\n@@ -319,6 +324,21 @@\n \"zh_CN\": \"\u5f00\u542f\u540e\uff0c\u672c\u6269\u5c55\u4f1a\u81ea\u52a8\u8bc6\u522b\u8f93\u5165\u6846\u4ee5\u53ca\u5e38\u89c1\u7f16\u8f91\u5668\uff0c\u5982 CodeMirror\u3001ACE \u548c Monaco\u3002\",\n \"zh_TW\": \"\u958b\u555f\u540e\uff0c\u672c\u7a0b\u5f0f\u6703\u81ea\u52d5\u8b58\u5225\u8f38\u5165\u6846\u4ee5\u53ca\u5e38\u898b\u7de8\u8f2f\u5668\uff0c\u5982 CodeMirror\u3001ACE \u548c Monaco\u3002\"\n },\n+ \"opt_pdf_blackwhitelist_help\": {\n+ \"en\": \"Blacklisted PDF links will not jump to Saladict PDF Viewer.\",\n+ \"zh_CN\": \"\u9ed1\u540d\u5355\u5339\u914d\u7684 PDF \u94fe\u63a5\u5c06\u4e0d\u4f1a\u8df3\u8f6c\u5230 Saladict \u6253\u5f00\u3002\",\n+ \"zh_TW\": \"\u9ed1\u540d\u55ae\u5339\u914d\u7684 PDF \u9023\u7d50\u5c07\u4e0d\u6703\u8df3\u8f49\u5230 Saladict \u958b\u555f\u3002\"\n+ },\n+ \"opt_pdf_sniff\": {\n+ \"en\": \"Enable PDF Sniffer\",\n+ \"zh_CN\": \"\u9ed8\u8ba4\u7528\u672c\u6269\u5c55\u6d4f\u89c8 PDF\",\n+ \"zh_TW\": \"\u4f7f\u7528\u672c\u61c9\u7528\u7a0b\u5f0f\u700f\u89bd PDF\"\n+ },\n+ \"opt_pdf_sniff_help\": {\n+ \"en\": \"If turned on\uff0c PDF links will be automatically captured.\",\n+ \"zh_CN\": \"\u5f00\u542f\u540e\u6240\u6709 PDF 
\u94fe\u63a5\u5c06\u81ea\u52a8\u8df3\u8f6c\u5230\u672c\u6269\u5c55\u6253\u5f00\uff08\u5305\u62ec\u672c\u5730\uff0c\u5982\u679c\u5728\u6269\u5c55\u7ba1\u7406\u9875\u9762\u52fe\u9009\u4e86\u5141\u8bb8\uff09\u3002\",\n+ \"zh_TW\": \"\u958b\u555f\u5f8c\u6240\u6709 PDF \u9023\u7d50\u5c07\u81ea\u52d5\u8df3\u8f49\u5230\u672c\u64f4\u5145\u5957\u4ef6\u958b\u555f\uff08\u5305\u62ec\u672c\u5730\uff0c\u5982\u679c\u5728\u64f4\u5145\u5957\u4ef6\u7ba1\u7406\u9801\u9762\u52fe\u9078\u4e86\u5141\u8a31\uff09\u3002\"\n+ },\n \"opt_profile_change\": {\n \"en\": \"This option may change base on \\\"Profile\\\".\",\n \"zh_CN\": \"\u6b64\u9009\u9879\u4f1a\u56e0\u300c\u60c5\u666f\u6a21\u5f0f\u300d\u800c\u6539\u53d8\u3002\",\n@@ -329,6 +349,16 @@\n \"zh_CN\": \"\u8f93\u5165\u65f6\u663e\u793a\u5019\u9009\",\n \"zh_TW\": \"\u8f38\u5165\u6642\u986f\u793a\u5019\u9078\"\n },\n+ \"opt_sel_blackwhitelist\": {\n+ \"en\": \"Selection Black/White List\",\n+ \"zh_CN\": \"\u5212\u8bcd\u9ed1\u767d\u540d\u5355\",\n+ \"zh_TW\": \"\u9078\u8a5e\u9ed1\u767d\u540d\u55ae\"\n+ },\n+ \"opt_sel_blackwhitelist_help\": {\n+ \"en\": \"Saladict will not react to selection in blacklisted pages.\",\n+ \"zh_CN\": \"\u9ed1\u540d\u5355\u5339\u914d\u7684\u9875\u9762 Saladict \u5c06\u4e0d\u4f1a\u54cd\u5e94\u9f20\u6807\u5212\u8bcd\u3002\",\n+ \"zh_TW\": \"\u9ed1\u540d\u55ae\u5339\u914d\u7684\u9801\u9762 Saladict \u5c07\u4e0d\u6703\u97ff\u61c9\u6ed1\u9f20\u5283\u8a5e\u3002\"\n+ },\n \"opt_sel_lang\": {\n \"en\": \"Selection Languages\",\n \"zh_CN\": \"\u5212\u8bcd\u8bed\u8a00\",\ndiff --git a/src/options/components/options/BlackWhiteList/index.tsx b/src/options/components/options/BlackWhiteList/index.tsx\nnew file mode 100644\nindex 0000000..52708dd\n--- /dev/null\n+++ b/src/options/components/options/BlackWhiteList/index.tsx\n@@ -0,0 +1,69 @@\n+import React from 'react'\n+import { Props } from '../typings'\n+import { formItemLayout } from '../helpers'\n+import MatchPatternModal from '../../MatchPatternModal'\n+\n+import { FormComponentProps } from 'antd/lib/form'\n+import { Form, Button } from 'antd'\n+\n+export type BlackWhiteListProps = Props & FormComponentProps\n+\n+interface BlackWhiteListState {\n+ editingArea: '' | 'pdfWhitelist' | 'pdfBlacklist' | 'whitelist' | 'blacklist'\n+}\n+\n+export class BlackWhiteList extends React.Component<BlackWhiteListProps, BlackWhiteListState> {\n+ constructor (props: BlackWhiteListProps) {\n+ super(props)\n+ this.state = {\n+ editingArea: ''\n+ }\n+ }\n+\n+ closeModal = () => {\n+ this.setState({ editingArea: '' })\n+ }\n+\n+ render () {\n+ const { t, config } = this.props\n+\n+ return (\n+ <Form>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={t('opt_sel_blackwhitelist')}\n+ help={t('opt_sel_blackwhitelist_help')}\n+ >\n+ <Button\n+ style={{ marginRight: 10 }}\n+ onClick={() => this.setState({ editingArea: 'blacklist' })}\n+ >{t('common:blacklist')}</Button>\n+ <Button\n+ onClick={() => this.setState({ editingArea: 'whitelist' })}\n+ >{t('common:whitelist')}</Button>\n+ </Form.Item>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={`PDF ${t('nav_BlackWhiteList')}`}\n+ help={t('opt_pdf_blackwhitelist_help')}\n+ >\n+ <Button\n+ style={{ marginRight: 10 }}\n+ onClick={() => this.setState({ editingArea: 'pdfBlacklist' })}\n+ >PDF {t('common:blacklist')}</Button>\n+ <Button\n+ onClick={() => this.setState({ editingArea: 'pdfWhitelist' })}\n+ >PDF {t('common:whitelist')}</Button>\n+ </Form.Item>\n+ <MatchPatternModal\n+ t={t}\n+ config={config}\n+ area={this.state.editingArea}\n+ onClose={this.closeModal}\n+ />\n+ 
</Form>\n+ )\n+ }\n+}\n+\n+export default BlackWhiteList\ndiff --git a/src/options/components/options/PDF/index.tsx b/src/options/components/options/PDF/index.tsx\nnew file mode 100644\nindex 0000000..3e7772d\n--- /dev/null\n+++ b/src/options/components/options/PDF/index.tsx\n@@ -0,0 +1,72 @@\n+import React from 'react'\n+import { Props } from '../typings'\n+import { updateConfigOrProfile, formItemLayout } from '../helpers'\n+import MatchPatternModal from '../../MatchPatternModal'\n+\n+import { FormComponentProps } from 'antd/lib/form'\n+import { Form, Switch, Button } from 'antd'\n+\n+export type PDFProps = Props & FormComponentProps\n+\n+interface PDFState {\n+ editingArea: '' | 'pdfWhitelist' | 'pdfBlacklist'\n+}\n+\n+export class PDF extends React.Component<PDFProps, PDFState> {\n+ constructor (props: PDFProps) {\n+ super(props)\n+\n+ this.state = {\n+ editingArea: ''\n+ }\n+ }\n+\n+ closeModal = () => {\n+ this.setState({ editingArea: '' })\n+ }\n+\n+ render () {\n+ const { t, config } = this.props\n+ const { getFieldDecorator } = this.props.form\n+\n+ return (\n+ <Form>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={t('opt_pdf_sniff')}\n+ help={t('opt_pdf_sniff_help')}\n+ >{\n+ getFieldDecorator('config#pdfSniff', {\n+ initialValue: config.pdfSniff,\n+ valuePropName: 'checked',\n+ })(\n+ <Switch />\n+ )\n+ }</Form.Item>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={t('nav_BlackWhiteList')}\n+ help={t('opt_pdf_blackwhitelist_help')}\n+ >\n+ <Button\n+ style={{ marginRight: 10 }}\n+ onClick={() => this.setState({ editingArea: 'pdfBlacklist' })}\n+ >PDF {t('common:blacklist')}</Button>\n+ <Button\n+ onClick={() => this.setState({ editingArea: 'pdfWhitelist' })}\n+ >PDF {t('common:whitelist')}</Button>\n+ </Form.Item>\n+ <MatchPatternModal\n+ t={t}\n+ config={config}\n+ area={this.state.editingArea}\n+ onClose={this.closeModal}\n+ />\n+ </Form>\n+ )\n+ }\n+}\n+\n+export default Form.create<PDFProps>({\n+ onValuesChange: updateConfigOrProfile\n+})(PDF)\n", "diff --git a/config/webpack.config.prod.js b/config/webpack.config.prod.js\nindex f7c6b23..4a00c65 100644\n--- a/config/webpack.config.prod.js\n+++ b/config/webpack.config.prod.js\n@@ -266,7 +266,7 @@ module.exports = {\n : new UglifyJsPlugin({\n uglifyOptions: {\n ie8: false,\n- ecma: 6,\n+ ecma: 7,\n compress: {\n warnings: false,\n // Disabled because of an issue with Uglify breaking seemingly valid code:\n", "diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml\nindex 3830f4c..3b14ee5 100644\n--- a/.github/workflows/deploy.yaml\n+++ b/.github/workflows/deploy.yaml\n@@ -67,7 +67,7 @@ jobs:\n run: aws s3 cp .next/static s3://cdn.rs.school/_next/static/ --recursive --cache-control \"public,max-age=15552000,immutable\"\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -117,7 +117,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -167,7 +167,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker 
buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n", "diff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 4ad32a6..b2e5d72 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -841,3 +841,63 @@ def test_filter_group_by_agg_with_same_name():\n )\n ex = sa.select([t0]).where(t0.c.bigint_col == 60)\n _check(expr, ex)\n+\n+\[email protected]\n+def person():\n+ return ibis.table(\n+ dict(id=\"string\", personal=\"string\", family=\"string\"),\n+ name=\"person\",\n+ )\n+\n+\[email protected]\n+def visited():\n+ return ibis.table(\n+ dict(id=\"int32\", site=\"string\", dated=\"string\"),\n+ name=\"visited\",\n+ )\n+\n+\[email protected]\n+def survey():\n+ return ibis.table(\n+ dict(\n+ taken=\"int32\",\n+ person=\"string\",\n+ quant=\"string\",\n+ reading=\"float32\",\n+ ),\n+ name=\"survey\",\n+ )\n+\n+\n+def test_no_cross_join(person, visited, survey):\n+ expr = person.join(survey, person.id == survey.person).join(\n+ visited,\n+ visited.id == survey.taken,\n+ )\n+\n+ context = AlchemyContext(compiler=AlchemyCompiler)\n+ _ = AlchemyCompiler.to_sql(expr, context)\n+\n+ t0 = context.get_ref(person)\n+ t1 = context.get_ref(survey)\n+ t2 = context.get_ref(visited)\n+\n+ from_ = t0.join(t1, t0.c.id == t1.c.person).join(t2, t2.c.id == t1.c.taken)\n+ ex = sa.select(\n+ [\n+ t0.c.id.label(\"id_x\"),\n+ t0.c.personal,\n+ t0.c.family,\n+ t1.c.taken,\n+ t1.c.person,\n+ t1.c.quant,\n+ t1.c.reading,\n+ t2.c.id.label(\"id_y\"),\n+ t2.c.site,\n+ t2.c.dated,\n+ ]\n+ ).select_from(from_)\n+ _check(expr, ex)\n", "diff --git a/src/Object/_api.ts b/src/Object/_api.ts\nindex a1e5c8c..70e9fea 100644\n--- a/src/Object/_api.ts\n+++ b/src/Object/_api.ts\n@@ -38,9 +38,9 @@ export {Optional} from './Optional'\n export {OptionalKeys} from './OptionalKeys'\n export {Overwrite} from './Overwrite'\n export {Partial} from './Partial'\n+export {Path} from './Path'\n export {Paths} from './Paths'\n export {PathValid} from './PathValid'\n-export {Path} from './Path'\n export {Pick} from './Pick'\n export {Readonly} from './Readonly'\n export {ReadonlyKeys} from './ReadonlyKeys'\n"]
5
["9b1c0fc20b614513384a1e562317dbf076eb8ef0", "6aa63c9b8d4dcdbb401743adc3c9a1020d943250", "7785be09053049b30cf41b420c59f051cd0129fc", "8dac3fe5a7a56356ca95547fcf7925bec8d9c1dd", "879edb6ed90f88b9ae6a3c2e8878ae1be48e0c88"]
["feat", "build", "ci", "test", "refactor"]
updated riot to v6, fixed build,skip ruff format in pre-commit ci runner,do not query all networks,Add the select function for logicflow,fix monorepo.dir prop Signed-off-by: Carlos Alexandro Becker <[email protected]>
["diff --git a/components/riot/package.json b/components/riot/package.json\nindex c41743a..eb69756 100644\n--- a/components/riot/package.json\n+++ b/components/riot/package.json\n@@ -61,7 +61,7 @@\n },\n \"devDependencies\": {\n \"@babel/preset-typescript\": \"^7.14.5\",\n- \"@riotjs/cli\": \"^6.0.4\",\n+ \"@riotjs/cli\": \"^6.0.5\",\n \"@riotjs/compiler\": \"^6.0.0\",\n \"chai\": \"^4.3.4\",\n \"esm\": \"^3.2.25\",\n", "diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\nindex 6193d96..4ba39d6 100644\n--- a/.pre-commit-config.yaml\n+++ b/.pre-commit-config.yaml\n@@ -9,6 +9,7 @@ ci:\n - nixpkgs-fmt\n - prettier\n - ruff\n+ - ruff-format\n - shellcheck\n - shfmt\n - statix\n", "diff --git a/src/environment/windows_win32.go b/src/environment/windows_win32.go\nindex be0c7b5..b90e0ff 100644\n--- a/src/environment/windows_win32.go\n+++ b/src/environment/windows_win32.go\n@@ -203,7 +203,6 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \tvar pIFTable2 *MIN_IF_TABLE2\n \t_, _, _ = hGetIfTable2.Call(uintptr(unsafe.Pointer(&pIFTable2)))\n \n-\tSSIDs, _ := env.getAllWifiSSID()\n \tnetworks := make([]*Connection, 0)\n \n \tfor i := 0; i < int(pIFTable2.NumEntries); i++ {\n@@ -220,11 +219,13 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \t\t}\n \n \t\tvar connectionType ConnectionType\n+\t\tvar ssid string\n \t\tswitch networkInterface.Type {\n \t\tcase 6:\n \t\t\tconnectionType = ETHERNET\n \t\tcase 71:\n \t\t\tconnectionType = WIFI\n+\t\t\tssid = env.getWiFiSSID(networkInterface.InterfaceGUID)\n \t\tcase 237, 234, 244:\n \t\t\tconnectionType = CELLULAR\n \t\t}\n@@ -243,10 +244,7 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \t\t\tName: description, // we want a relatable name, alias isn't that\n \t\t\tTransmitRate: networkInterface.TransmitLinkSpeed,\n \t\t\tReceiveRate: networkInterface.ReceiveLinkSpeed,\n-\t\t}\n-\n-\t\tif SSID, OK := SSIDs[network.Name]; OK {\n-\t\t\tnetwork.SSID = SSID\n+\t\t\tSSID: ssid,\n \t\t}\n \n \t\tnetworks = append(networks, network)\n@@ -322,13 +320,21 @@ type MIB_IF_ROW2 struct { //nolint: revive\n \tOutQLen uint64\n }\n \n-func (env *ShellEnvironment) getAllWifiSSID() (map[string]string, error) {\n+var (\n+\twlanapi = syscall.NewLazyDLL(\"wlanapi.dll\")\n+\thWlanOpenHandle = wlanapi.NewProc(\"WlanOpenHandle\")\n+\thWlanCloseHandle = wlanapi.NewProc(\"WlanCloseHandle\")\n+\thWlanQueryInterface = wlanapi.NewProc(\"WlanQueryInterface\")\n+)\n+\n+func (env *ShellEnvironment) getWiFiSSID(guid windows.GUID) string {\n+\t// Query wifi connection state\n \tvar pdwNegotiatedVersion uint32\n \tvar phClientHandle uint32\n \te, _, err := hWlanOpenHandle.Call(uintptr(uint32(2)), uintptr(unsafe.Pointer(nil)), uintptr(unsafe.Pointer(&pdwNegotiatedVersion)), uintptr(unsafe.Pointer(&phClientHandle)))\n \tif e != 0 {\n \t\tenv.Log(Error, \"getAllWifiSSID\", err.Error())\n-\t\treturn nil, err\n+\t\treturn \"\"\n \t}\n \n \t// defer closing handle\n@@ -336,42 +342,11 @@ func (env *ShellEnvironment) getAllWifiSSID() (map[string]string, error) {\n \t\t_, _, _ = hWlanCloseHandle.Call(uintptr(phClientHandle), uintptr(unsafe.Pointer(nil)))\n \t}()\n \n-\tssid := make(map[string]string)\n-\t// list interfaces\n-\tvar interfaceList *WLAN_INTERFACE_INFO_LIST\n-\te, _, err = hWlanEnumInterfaces.Call(uintptr(phClientHandle), uintptr(unsafe.Pointer(nil)), uintptr(unsafe.Pointer(&interfaceList)))\n-\tif e != 0 {\n-\t\tenv.Log(Error, \"getAllWifiSSID\", err.Error())\n-\t\treturn nil, err\n-\t}\n-\n-\t// use first interface that 
is connected\n-\tnumberOfInterfaces := int(interfaceList.dwNumberOfItems)\n-\tinfoSize := unsafe.Sizeof(interfaceList.InterfaceInfo[0])\n-\tfor i := 0; i < numberOfInterfaces; i++ {\n-\t\tnetwork := (*WLAN_INTERFACE_INFO)(unsafe.Pointer(uintptr(unsafe.Pointer(&interfaceList.InterfaceInfo[0])) + uintptr(i)*infoSize))\n-\t\tif network.isState == 1 {\n-\t\t\twifiInterface := strings.TrimRight(string(utf16.Decode(network.strInterfaceDescription[:])), \"\\x00\")\n-\t\t\tssid[wifiInterface] = env.getWiFiSSID(network, phClientHandle)\n-\t\t}\n-\t}\n-\treturn ssid, nil\n-}\n-\n-var (\n-\twlanapi = syscall.NewLazyDLL(\"wlanapi.dll\")\n-\thWlanOpenHandle = wlanapi.NewProc(\"WlanOpenHandle\")\n-\thWlanCloseHandle = wlanapi.NewProc(\"WlanCloseHandle\")\n-\thWlanEnumInterfaces = wlanapi.NewProc(\"WlanEnumInterfaces\")\n-\thWlanQueryInterface = wlanapi.NewProc(\"WlanQueryInterface\")\n-)\n-\n-func (env *ShellEnvironment) getWiFiSSID(network *WLAN_INTERFACE_INFO, clientHandle uint32) string {\n-\t// Query wifi connection state\n \tvar dataSize uint16\n \tvar wlanAttr *WLAN_CONNECTION_ATTRIBUTES\n-\te, _, _ := hWlanQueryInterface.Call(uintptr(clientHandle),\n-\t\tuintptr(unsafe.Pointer(&network.InterfaceGuid)),\n+\n+\te, _, _ = hWlanQueryInterface.Call(uintptr(phClientHandle),\n+\t\tuintptr(unsafe.Pointer(&guid)),\n \t\tuintptr(7), // wlan_intf_opcode_current_connection\n \t\tuintptr(unsafe.Pointer(nil)),\n \t\tuintptr(unsafe.Pointer(&dataSize)),\n@@ -389,18 +364,6 @@ func (env *ShellEnvironment) getWiFiSSID(network *WLAN_INTERFACE_INFO, clientHan\n \treturn string(ssid.ucSSID[0:ssid.uSSIDLength])\n }\n \n-type WLAN_INTERFACE_INFO_LIST struct { //nolint: revive\n-\tdwNumberOfItems uint32\n-\tdwIndex uint32 //nolint: unused\n-\tInterfaceInfo [256]WLAN_INTERFACE_INFO\n-}\n-\n-type WLAN_INTERFACE_INFO struct { //nolint: revive\n-\tInterfaceGuid syscall.GUID //nolint: revive\n-\tstrInterfaceDescription [256]uint16\n-\tisState uint32\n-}\n-\n type WLAN_CONNECTION_ATTRIBUTES struct { //nolint: revive\n \tisState uint32 //nolint: unused\n \twlanConnectionMode uint32 //nolint: unused\n", "diff --git a/packages/core/src/LogicFlow.tsx b/packages/core/src/LogicFlow.tsx\nindex 0d913b7..dcc59b3 100644\n--- a/packages/core/src/LogicFlow.tsx\n+++ b/packages/core/src/LogicFlow.tsx\n@@ -276,6 +276,12 @@ export default class LogicFlow {\n this.translate(-TRANSLATE_X, -TRANSLATE_Y);\n }\n /**\n+ * \u5c06\u56fe\u5f62\u9009\u4e2d\n+ */\n+ select(id: string) {\n+ this.graphModel.selectElementById(id);\n+ }\n+ /**\n * \u5c06\u56fe\u5f62\u5b9a\u4f4d\u5230\u753b\u5e03\u4e2d\u5fc3\n * @param focusOnArgs \u652f\u6301\u7528\u6237\u4f20\u5165\u56fe\u5f62\u5f53\u524d\u7684\u5750\u6807\u6216id\uff0c\u53ef\u4ee5\u901a\u8fc7type\u6765\u533a\u5206\u662f\u8282\u70b9\u8fd8\u662f\u8fde\u7ebf\u7684id\uff0c\u4e5f\u53ef\u4ee5\u4e0d\u4f20\uff08\u515c\u5e95\uff09\n */\ndiff --git a/packages/core/src/model/GraphModel.ts b/packages/core/src/model/GraphModel.ts\nindex 94d0899..10280a9 100644\n--- a/packages/core/src/model/GraphModel.ts\n+++ b/packages/core/src/model/GraphModel.ts\n@@ -481,6 +481,13 @@ class GraphModel {\n this.selectElement?.setSelected(true);\n }\n \n+ @action\n+ selectElementById(id: string) {\n+ this.selectElement?.setSelected(false);\n+ this.selectElement = this.getElement(id) as BaseNodeModel | BaseEdgeModel;\n+ this.selectElement?.setSelected(true);\n+ }\n+\n /* \u4fee\u6539\u8fde\u7ebf\u7c7b\u578b */\n @action\n changeEdgeType(type: string): void {\n", "diff --git a/www/docs/customization/monorepo.md 
b/www/docs/customization/monorepo.md\nindex 6d0e857..e45490f 100644\n--- a/www/docs/customization/monorepo.md\n+++ b/www/docs/customization/monorepo.md\n@@ -18,7 +18,7 @@ project_name: subproj1\n \n monorepo:\n tag_prefix: subproject1/\n- folder: subproj1\n+ dir: subproj1\n ```\n \n Then, you can release with (from the project's root directory):\n@@ -30,11 +30,11 @@ goreleaser release --rm-dist -f ./subproj1/.goreleaser.yml\n Then, the following is different from a \"regular\" run:\n \n - GoReleaser will then look if current commit has a tag prefixed with `subproject1`, and also the previous tag with the same prefix;\n-- Changelog will include only commits that contain changes to files within the `subproj1` folder;\n+- Changelog will include only commits that contain changes to files within the `subproj1` directory;\n - Release name gets prefixed with `{{ .ProjectName }} ` if empty;\n-- All build's `dir` setting get set to `monorepo.folder` if empty;\n+- All build's `dir` setting get set to `monorepo.dir` if empty;\n - if yours is not, you might want to change that manually;\n-- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.folder`;\n+- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.dir`;\n - On templates, `{{.PrefixedTag}}` will be `monorepo.prefix/tag` (aka the actual tag name), and `{{.Tag}}` has the prefix stripped;\n \n The rest of the release process should work as usual.\n"]
5
["5d256f937f93e5a5ed003df86d38c44834095a11", "9117fdedb9b5ce0345c31b3e1fa22ae8554944d4", "8a9a022baa15befc325f87892c6bdae25b35bc33", "6ae067153cd2608018fd3da76bd6d00a08da4b3a", "9ed3c0c4a72af977fc9150512fb6538f20a94b22"]
["build", "ci", "refactor", "feat", "docs"]
improve test stability * improve test stability by waiting until the message subscription is opened. Message subscriptions are opened outside of the context of the stream processor. Sometimes this may take a while. * enable running the tests repeatably by fixing the engine rule,permission check,use an action for issue assignment,bundle and tree shake assets with webpack,Fix readme Signed-off-by: Ben Johnson <[email protected]>
["diff --git a/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java b/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\nindex 0c539b9..ffaead1 100644\n--- a/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\n@@ -334,22 +334,31 @@ public class InterruptingEventSubprocessTest {\n \"timer-event-subprocess\",\n s -> s.startEvent(\"other-timer\").timerWithDuration(\"P1D\").endEvent());\n \n- final long wfInstanceKey = createInstanceAndTriggerEvent(workflow(eventSubprocess));\n+ final long wfInstanceKey = createInstanceAndWaitForTask(workflow(eventSubprocess));\n+\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .withWorkflowInstanceKey(wfInstanceKey)\n+ .withMessageName(\"other-message\")\n+ .await();\n+\n+ triggerEventSubprocess.accept(wfInstanceKey);\n \n // then\n assertThat(\n- RecordingExporter.messageSubscriptionRecords()\n+ RecordingExporter.records()\n+ .limitToWorkflowInstance(wfInstanceKey)\n+ .messageSubscriptionRecords()\n .withWorkflowInstanceKey(wfInstanceKey)\n- .withMessageName(\"other-message\")\n- .limit(4))\n+ .withMessageName(\"other-message\"))\n .extracting(Record::getIntent)\n .contains(MessageSubscriptionIntent.CLOSED);\n \n assertThat(\n- RecordingExporter.timerRecords()\n+ RecordingExporter.records()\n+ .limitToWorkflowInstance(wfInstanceKey)\n+ .timerRecords()\n .withWorkflowInstanceKey(wfInstanceKey)\n- .withHandlerNodeId(\"other-timer\")\n- .limit(4))\n+ .withHandlerNodeId(\"other-timer\"))\n .extracting(Record::getIntent)\n .contains(TimerIntent.CANCELED);\n }\ndiff --git a/engine/src/test/java/io/zeebe/engine/util/EngineRule.java b/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\nindex 8576be5..50040f4 100644\n--- a/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\n+++ b/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\n@@ -71,7 +71,7 @@ public final class EngineRule extends ExternalResource {\n \n private static final int PARTITION_ID = Protocol.DEPLOYMENT_PARTITION;\n private static final RecordingExporter RECORDING_EXPORTER = new RecordingExporter();\n- private StreamProcessorRule environmentRule;\n+ private final StreamProcessorRule environmentRule;\n private final RecordingExporterTestWatcher recordingExporterTestWatcher =\n new RecordingExporterTestWatcher();\n private final int partitionCount;\n@@ -80,7 +80,7 @@ public final class EngineRule extends ExternalResource {\n \n private final Int2ObjectHashMap<SubscriptionCommandMessageHandler> subscriptionHandlers =\n new Int2ObjectHashMap<>();\n- private final ExecutorService subscriptionHandlerExecutor = Executors.newSingleThreadExecutor();\n+ private ExecutorService subscriptionHandlerExecutor;\n \n private EngineRule(final int partitionCount) {\n this(partitionCount, false);\n@@ -115,6 +115,8 @@ public final class EngineRule extends ExternalResource {\n \n @Override\n protected void before() {\n+ subscriptionHandlerExecutor = Executors.newSingleThreadExecutor();\n+\n if (!explicitStart) {\n startProcessors();\n }\n@@ -123,7 +125,6 @@ public final class EngineRule extends ExternalResource {\n @Override\n protected void after() {\n subscriptionHandlerExecutor.shutdown();\n- environmentRule = null;\n subscriptionHandlers.clear();\n }\n \ndiff --git 
a/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java b/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\nindex 0f3da21..af6c50e 100755\n--- a/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\n+++ b/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\n@@ -248,6 +248,7 @@ public final class StreamProcessorRule implements TestRule {\n @Override\n protected void after() {\n streams = null;\n+ streamProcessingComposite = null;\n }\n }\n \ndiff --git a/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java b/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\nindex 293df93..a3ede18 100644\n--- a/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\n+++ b/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\n@@ -81,4 +81,9 @@ public final class RecordStream extends ExporterRecordStream<RecordValue, Record\n return new IncidentRecordStream(\n filter(r -> r.getValueType() == ValueType.INCIDENT).map(Record.class::cast));\n }\n+\n+ public MessageSubscriptionRecordStream messageSubscriptionRecords() {\n+ return new MessageSubscriptionRecordStream(\n+ filter(r -> r.getValueType() == ValueType.MESSAGE_SUBSCRIPTION).map(Record.class::cast));\n+ }\n }\n", "diff --git a/server/src/routes/course/index.ts b/server/src/routes/course/index.ts\nindex 557f5fb..bc0e490 100644\n--- a/server/src/routes/course/index.ts\n+++ b/server/src/routes/course/index.ts\n@@ -209,7 +209,7 @@ function addStudentApi(router: Router, logger: ILogger) {\n router.post('/student/:githubId/status', ...mentorValidators, updateStudentStatus(logger));\n router.post('/student/:githubId/status-self', courseGuard, selfUpdateStudentStatus(logger));\n router.get('/student/:githubId/score', courseGuard, getScoreByStudent(logger));\n- router.post('/student/:githubId/certificate', courseManagerGuard, ...validators, postStudentCertificate(logger));\n+ router.post('/student/:githubId/certificate', courseManagerGuard, validateGithubId, postStudentCertificate(logger));\n \n router.get('/students', courseSupervisorGuard, getStudents(logger));\n router.get('/students/csv', courseSupervisorGuard, getStudentsCsv(logger));\n", "diff --git a/.github/workflows/assign.yml b/.github/workflows/assign.yml\nindex 29d92a8..758874e 100644\n--- a/.github/workflows/assign.yml\n+++ b/.github/workflows/assign.yml\n@@ -8,8 +8,6 @@ jobs:\n runs-on: ubuntu-latest\n if: ${{ github.event.comment.body == '/take' }}\n steps:\n- - uses: actions/checkout@v2\n- - name: Assign issue ${{ github.event.issue.number }} to ${{ github.event.comment.user.login }}\n- run: gh issue edit ${{ github.event.issue.number }} --add-assignee \"${{ github.event.comment.user.login }}\"\n- env:\n- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n+ - uses: pozil/[email protected]\n+ with:\n+ assignees: ${{ github.event.comment.user.login }}\n", "diff --git a/package.json b/package.json\nindex c8051d2..b0a97fb 100644\n--- a/package.json\n+++ b/package.json\n@@ -60,6 +60,7 @@\n \"babel-cli\": \"^6.16.0\",\n \"babel-core\": \"^6.16.0\",\n \"babel-eslint\": \"^7.0.0\",\n+ \"babel-loader\": \"^6.2.5\",\n \"babel-plugin-transform-class-properties\": \"^6.10.2\",\n \"babel-plugin-transform-flow-strip-types\": \"^6.14.0\",\n \"babel-preset-es2015-node6\": \"^0.3.0\",\n@@ -82,6 +83,7 @@\n \"eslint-plugin-react\": \"^6.3.0\",\n \"flow-bin\": \"^0.33.0\",\n \"jsdom\": \"^9.4.2\",\n+ \"json-loader\": \"^0.5.4\",\n \"jsx-chai\": \"^4.0.0\",\n \"mocha\": \"^3.0.2\",\n \"mock-require\": 
\"^1.3.0\",\n@@ -91,6 +93,8 @@\n \"rimraf\": \"^2.5.2\",\n \"sinon\": \"^1.17.6\",\n \"sinon-chai\": \"^2.8.0\",\n- \"watch\": \"^1.0.0\"\n+ \"source-map-support\": \"^0.4.3\",\n+ \"watch\": \"^1.0.0\",\n+ \"webpack\": \"^1.13.2\"\n }\n }\ndiff --git a/webpack.config.js b/webpack.config.js\nnew file mode 100644\nindex 0000000..0ca6da1\n--- /dev/null\n+++ b/webpack.config.js\n@@ -0,0 +1,44 @@\n+const webpack = require('webpack');\n+const path = require('path');\n+const fs = require('fs');\n+\n+const nodeModules = {\n+ zmq: 'commonjs zmq',\n+ jmp: 'commonjs jmp',\n+ github: 'commonjs github',\n+};\n+\n+module.exports = {\n+ entry: './src/notebook/index.js',\n+ target: 'electron-renderer',\n+ output: {\n+ path: path.join(__dirname, 'app', 'build'),\n+ filename: 'webpacked-notebook.js'\n+ },\n+ module: {\n+ loaders: [\n+ { test: /\\.js$/, exclude: /node_modules/, loaders: ['babel'] },\n+ { test: /\\.json$/, loader: 'json-loader' },\n+ ]\n+ },\n+ resolve: {\n+ extensions: ['', '.js', '.jsx'],\n+ root: path.join(__dirname, 'app'),\n+ // Webpack 1\n+ modulesDirectories: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ path.resolve(__dirname, 'node_modules'),\n+ ],\n+ // Webpack 2\n+ modules: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ ],\n+ },\n+ externals: nodeModules,\n+ plugins: [\n+ new webpack.IgnorePlugin(/\\.(css|less)$/),\n+ new webpack.BannerPlugin('require(\"source-map-support\").install();',\n+ { raw: true, entryOnly: false })\n+ ],\n+ devtool: 'sourcemap'\n+};\n", "diff --git a/README.md b/README.md\nindex 587d655..da746bb 100644\n--- a/README.md\n+++ b/README.md\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * [**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - [docker][docs.sources.docker], [file][docs.sources.file], [journald][docs.sources.journald], [kafka][docs.sources.kafka]\n+* [**Transforms**][docs.transforms] - [json_parser][docs.transforms.json_parser], [log_to_metric][docs.transforms.log_to_metric], [lua][docs.transforms.lua], [regex_parser][docs.transforms.regex_parser]\n+* [**Sinks**][docs.sinks] - [aws_cloudwatch_logs][docs.sinks.aws_cloudwatch_logs], [aws_cloudwatch_metrics][docs.sinks.aws_cloudwatch_metrics], [aws_kinesis_streams][docs.sinks.aws_kinesis_streams], [aws_s3][docs.sinks.aws_s3], [clickhouse][docs.sinks.clickhouse], [elasticsearch][docs.sinks.elasticsearch], and [15 more][docs.sinks]\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,88 +110,6 @@ curl --proto '=https' --tlsv1.2 -sSf https://sh.vector.dev | sh\n \n Or view [platform specific installation 
instructions][docs.installation].\n \n-\n-## Sources\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`docker`**][docs.sources.docker] | Ingests data through the docker engine daemon and outputs [`log`][docs.data-model#log] events. |\n-| [**`file`**][docs.sources.file] | Ingests data through one or more local files and outputs [`log`][docs.data-model#log] events. |\n-| [**`journald`**][docs.sources.journald] | Ingests data through log records from journald and outputs [`log`][docs.data-model#log] events. |\n-| [**`kafka`**][docs.sources.kafka] | Ingests data through Kafka 0.9 or later and outputs [`log`][docs.data-model#log] events. |\n-| [**`statsd`**][docs.sources.statsd] | Ingests data through the StatsD UDP protocol and outputs [`metric`][docs.data-model#metric] events. |\n-| [**`stdin`**][docs.sources.stdin] | Ingests data through standard input (STDIN) and outputs [`log`][docs.data-model#log] events. |\n-| [**`syslog`**][docs.sources.syslog] | Ingests data through the Syslog 5424 protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`tcp`**][docs.sources.tcp] | Ingests data through the TCP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`udp`**][docs.sources.udp] | Ingests data through the UDP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`vector`**][docs.sources.vector] | Ingests data through another upstream [`vector` sink][docs.sinks.vector] and outputs [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events. |\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`add_fields`**][docs.transforms.add_fields] | Accepts [`log`][docs.data-model#log] events and allows you to add one or more log fields. |\n-| [**`add_tags`**][docs.transforms.add_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to add one or more metric tags. |\n-| [**`coercer`**][docs.transforms.coercer] | Accepts [`log`][docs.data-model#log] events and allows you to coerce log fields into fixed types. |\n-| [**`field_filter`**][docs.transforms.field_filter] | Accepts [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events and allows you to filter events by a log field's value. |\n-| [**`grok_parser`**][docs.transforms.grok_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value with [Grok][urls.grok]. |\n-| [**`json_parser`**][docs.transforms.json_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value as JSON. |\n-| [**`log_to_metric`**][docs.transforms.log_to_metric] | Accepts [`log`][docs.data-model#log] events and allows you to convert logs into one or more metrics. |\n-| [**`lua`**][docs.transforms.lua] | Accepts [`log`][docs.data-model#log] events and allows you to transform events with a full embedded [Lua][urls.lua] engine. |\n-| [**`regex_parser`**][docs.transforms.regex_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field's value with a [Regular Expression][urls.regex]. |\n-| [**`remove_fields`**][docs.transforms.remove_fields] | Accepts [`log`][docs.data-model#log] events and allows you to remove one or more log fields. |\n-| [**`remove_tags`**][docs.transforms.remove_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to remove one or more metric tags. 
|\n-| [**`sampler`**][docs.transforms.sampler] | Accepts [`log`][docs.data-model#log] events and allows you to sample events with a configurable rate. |\n-| [**`split`**][docs.transforms.split] | Accepts [`log`][docs.data-model#log] events and allows you to split a field's value on a given separator and zip the tokens into ordered field names. |\n-| [**`tokenizer`**][docs.transforms.tokenizer] | Accepts [`log`][docs.data-model#log] events and allows you to tokenize a field's value by splitting on white space, ignoring special wrapping characters, and zip the tokens into ordered field names. |\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`aws_cloudwatch_logs`**][docs.sinks.aws_cloudwatch_logs] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS CloudWatch Logs][urls.aws_cw_logs] via the [`PutLogEvents` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html). |\n-| [**`aws_cloudwatch_metrics`**][docs.sinks.aws_cloudwatch_metrics] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [AWS CloudWatch Metrics][urls.aws_cw_metrics] via the [`PutMetricData` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_PutMetricData.html). |\n-| [**`aws_kinesis_streams`**][docs.sinks.aws_kinesis_streams] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS Kinesis Data Stream][urls.aws_kinesis_data_streams] via the [`PutRecords` API endpoint](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html). |\n-| [**`aws_s3`**][docs.sinks.aws_s3] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS S3][urls.aws_s3] via the [`PutObject` API endpoint](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html). |\n-| [**`blackhole`**][docs.sinks.blackhole] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to a blackhole that simply discards data, designed for testing and benchmarking purposes. |\n-| [**`clickhouse`**][docs.sinks.clickhouse] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Clickhouse][urls.clickhouse] via the [`HTTP` Interface][urls.clickhouse_http]. |\n-| [**`console`**][docs.sinks.console] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to [standard output streams][urls.standard_streams], such as `STDOUT` and `STDERR`. |\n-| [**`datadog_metrics`**][docs.sinks.datadog_metrics] | [Batches](#buffers-and-batches) [`metric`][docs.data-model#metric] events to [Datadog][urls.datadog] metrics service using [HTTP API](https://docs.datadoghq.com/api/?lang=bash#metrics). |\n-| [**`elasticsearch`**][docs.sinks.elasticsearch] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Elasticsearch][urls.elasticsearch] via the [`_bulk` API endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html). |\n-| [**`file`**][docs.sinks.file] | [Streams](#streaming) [`log`][docs.data-model#log] events to a file. |\n-| [**`http`**][docs.sinks.http] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a generic HTTP endpoint. |\n-| [**`kafka`**][docs.sinks.kafka] | [Streams](#streaming) [`log`][docs.data-model#log] events to [Apache Kafka][urls.kafka] via the [Kafka protocol][urls.kafka_protocol]. 
|\n-| [**`prometheus`**][docs.sinks.prometheus] | [Exposes](#exposing-and-scraping) [`metric`][docs.data-model#metric] events to [Prometheus][urls.prometheus] metrics service. |\n-| [**`splunk_hec`**][docs.sinks.splunk_hec] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a [Splunk HTTP Event Collector][urls.splunk_hec]. |\n-| [**`statsd`**][docs.sinks.statsd] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [StatsD][urls.statsd] metrics service. |\n-| [**`tcp`**][docs.sinks.tcp] | [Streams](#streaming) [`log`][docs.data-model#log] events to a TCP connection. |\n-| [**`vector`**][docs.sinks.vector] | [Streams](#streaming) [`log`][docs.data-model#log] events to another downstream [`vector` source][docs.sources.vector]. |\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright 2019, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\n@@ -200,8 +123,6 @@ the License.\n [docs.configuration]: https://vector.dev/docs/setup/configuration\n [docs.containers]: https://vector.dev/docs/setup/installation/containers\n [docs.correctness]: https://vector.dev/docs/about/correctness\n-[docs.data-model#log]: https://vector.dev/docs/about/data-model#log\n-[docs.data-model#metric]: https://vector.dev/docs/about/data-model#metric\n [docs.data-model.log]: https://vector.dev/docs/about/data-model/log\n [docs.data-model.metric]: https://vector.dev/docs/about/data-model/metric\n [docs.data_model]: https://vector.dev/docs/about/data-model\n@@ -209,6 +130,7 @@ the License.\n [docs.from-archives]: https://vector.dev/docs/setup/installation/manual/from-archives\n [docs.from-source]: https://vector.dev/docs/setup/installation/manual/from-source\n [docs.guarantees]: https://vector.dev/docs/about/guarantees\n+[docs.guides.getting_started]: https://vector.dev/docs/setup/guides/getting-started\n [docs.guides]: https://vector.dev/docs/setup/guides\n [docs.installation]: https://vector.dev/docs/setup/installation\n [docs.monitoring]: https://vector.dev/docs/administration/monitoring\n@@ -224,72 +146,25 @@ the License.\n [docs.sinks.aws_cloudwatch_metrics]: https://vector.dev/docs/reference/sinks/aws_cloudwatch_metrics\n [docs.sinks.aws_kinesis_streams]: https://vector.dev/docs/reference/sinks/aws_kinesis_streams\n [docs.sinks.aws_s3]: https://vector.dev/docs/reference/sinks/aws_s3\n-[docs.sinks.blackhole]: https://vector.dev/docs/reference/sinks/blackhole\n [docs.sinks.clickhouse]: https://vector.dev/docs/reference/sinks/clickhouse\n-[docs.sinks.console]: https://vector.dev/docs/reference/sinks/console\n-[docs.sinks.datadog_metrics]: https://vector.dev/docs/reference/sinks/datadog_metrics\n [docs.sinks.elasticsearch]: https://vector.dev/docs/reference/sinks/elasticsearch\n-[docs.sinks.file]: https://vector.dev/docs/reference/sinks/file\n-[docs.sinks.http]: https://vector.dev/docs/reference/sinks/http\n-[docs.sinks.kafka]: https://vector.dev/docs/reference/sinks/kafka\n-[docs.sinks.prometheus]: 
https://vector.dev/docs/reference/sinks/prometheus\n-[docs.sinks.splunk_hec]: https://vector.dev/docs/reference/sinks/splunk_hec\n-[docs.sinks.statsd]: https://vector.dev/docs/reference/sinks/statsd\n-[docs.sinks.tcp]: https://vector.dev/docs/reference/sinks/tcp\n-[docs.sinks.vector]: https://vector.dev/docs/reference/sinks/vector\n [docs.sinks]: https://vector.dev/docs/reference/sinks\n [docs.sources.docker]: https://vector.dev/docs/reference/sources/docker\n [docs.sources.file]: https://vector.dev/docs/reference/sources/file\n [docs.sources.journald]: https://vector.dev/docs/reference/sources/journald\n [docs.sources.kafka]: https://vector.dev/docs/reference/sources/kafka\n-[docs.sources.statsd]: https://vector.dev/docs/reference/sources/statsd\n-[docs.sources.stdin]: https://vector.dev/docs/reference/sources/stdin\n-[docs.sources.syslog]: https://vector.dev/docs/reference/sources/syslog\n-[docs.sources.tcp]: https://vector.dev/docs/reference/sources/tcp\n-[docs.sources.udp]: https://vector.dev/docs/reference/sources/udp\n-[docs.sources.vector]: https://vector.dev/docs/reference/sources/vector\n [docs.sources]: https://vector.dev/docs/reference/sources\n [docs.topologies]: https://vector.dev/docs/setup/deployment/topologies\n-[docs.transforms.add_fields]: https://vector.dev/docs/reference/transforms/add_fields\n-[docs.transforms.add_tags]: https://vector.dev/docs/reference/transforms/add_tags\n-[docs.transforms.coercer]: https://vector.dev/docs/reference/transforms/coercer\n-[docs.transforms.field_filter]: https://vector.dev/docs/reference/transforms/field_filter\n-[docs.transforms.grok_parser]: https://vector.dev/docs/reference/transforms/grok_parser\n [docs.transforms.json_parser]: https://vector.dev/docs/reference/transforms/json_parser\n [docs.transforms.log_to_metric]: https://vector.dev/docs/reference/transforms/log_to_metric\n [docs.transforms.lua]: https://vector.dev/docs/reference/transforms/lua\n [docs.transforms.regex_parser]: https://vector.dev/docs/reference/transforms/regex_parser\n-[docs.transforms.remove_fields]: https://vector.dev/docs/reference/transforms/remove_fields\n-[docs.transforms.remove_tags]: https://vector.dev/docs/reference/transforms/remove_tags\n-[docs.transforms.sampler]: https://vector.dev/docs/reference/transforms/sampler\n-[docs.transforms.split]: https://vector.dev/docs/reference/transforms/split\n-[docs.transforms.tokenizer]: https://vector.dev/docs/reference/transforms/tokenizer\n [docs.transforms]: https://vector.dev/docs/reference/transforms\n [docs.updating]: https://vector.dev/docs/administration/updating\n [docs.use_cases]: https://vector.dev/docs/use_cases\n [docs.validating]: https://vector.dev/docs/administration/validating\n-[urls.aws_cw_logs]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/WhatIsCloudWatchLogs.html\n-[urls.aws_cw_metrics]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/working_with_metrics.html\n-[urls.aws_kinesis_data_streams]: https://aws.amazon.com/kinesis/data-streams/\n-[urls.aws_s3]: https://aws.amazon.com/s3/\n-[urls.clickhouse]: https://clickhouse.yandex/\n-[urls.clickhouse_http]: https://clickhouse.yandex/docs/en/interfaces/http/\n-[urls.datadog]: https://www.datadoghq.com\n-[urls.elasticsearch]: https://www.elastic.co/products/elasticsearch\n-[urls.grok]: http://grokdebug.herokuapp.com/\n-[urls.kafka]: https://kafka.apache.org/\n-[urls.kafka_protocol]: https://kafka.apache.org/protocol\n-[urls.lua]: https://www.lua.org/\n [urls.mailing_list]: 
https://vector.dev/mailing_list/\n-[urls.new_sink]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_source]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_transform]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.prometheus]: https://prometheus.io/\n-[urls.regex]: https://en.wikipedia.org/wiki/Regular_expression\n [urls.rust]: https://www.rust-lang.org/\n-[urls.splunk_hec]: http://dev.splunk.com/view/event-collector/SP-CAAAE6M\n-[urls.standard_streams]: https://en.wikipedia.org/wiki/Standard_streams\n-[urls.statsd]: https://github.com/statsd/statsd\n [urls.test_harness]: https://github.com/timberio/vector-test-harness/\n [urls.v0.5.0]: https://github.com/timberio/vector/releases/tag/v0.5.0\n [urls.vector_changelog]: https://github.com/timberio/vector/blob/master/CHANGELOG.md\ndiff --git a/README.md.erb b/README.md.erb\nindex 3b14aa0..cc241eb 100644\n--- a/README.md.erb\n+++ b/README.md.erb\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * [**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - <%= common_component_links(:source) %>\n+* [**Transforms**][docs.transforms] - <%= common_component_links(:transform) %>\n+* [**Sinks**][docs.sinks] - <%= common_component_links(:sink) %>\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,44 +110,6 @@ Run the following in your terminal, then follow the on-screen instructions.\n \n Or view [platform specific installation instructions][docs.installation].\n \n-\n-## Sources\n-\n-<%= components_table(metadata.sources.to_h.values.sort) %>\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-<%= components_table(metadata.transforms.to_h.values.sort) %>\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-<%= components_table(metadata.sinks.to_h.values.sort) %>\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright <%= Time.now.year %>, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\ndiff --git a/scripts/generate/templates.rb b/scripts/generate/templates.rb\nindex e5e7ce7..c793ae0 100644\n--- a/scripts/generate/templates.rb\n+++ b/scripts/generate/templates.rb\n@@ -89,6 +89,23 @@ class Templates\n render(\"#{partials_path}/_commit_type_toc_item.md\", binding).gsub(/,$/, \"\")\n end\n \n+ def common_component_links(type, limit = 5)\n+ common = metadata.send(\"#{type.to_s.pluralize}_list\").select(&:common?)\n+\n+ links =\n+ common[0..limit].collect do |component|\n+ \"[#{component.name}][docs.#{type.to_s.pluralize}.#{component.name}]\"\n+ end\n+\n+ num_leftover = common.size - links.size\n+\n+ if num_leftover > 0\n+ links << \"and [15 more][docs.#{type.to_s.pluralize}]\"\n+ end\n+\n+ links.join(\", \")\n+ end\n+\n def component_config_example(component)\n render(\"#{partials_path}/_component_config_example.md\", binding).strip\n end\ndiff --git a/scripts/util/metadata/component.rb b/scripts/util/metadata/component.rb\nindex 0873b2e..4dc5650 100644\n--- a/scripts/util/metadata/component.rb\n+++ b/scripts/util/metadata/component.rb\n@@ -9,6 +9,7 @@ class Component\n include Comparable\n \n attr_reader :beta,\n+ :common,\n :function_category,\n :id,\n :name,\n@@ -18,6 +19,7 @@ class Component\n \n def initialize(hash)\n @beta = hash[\"beta\"] == true\n+ @common = hash[\"common\"] == true\n @function_category = hash.fetch(\"function_category\")\n @name = hash.fetch(\"name\")\n @type ||= self.class.name.downcase\n@@ -71,6 +73,10 @@ class Component\n beta == true\n end\n \n+ def common?\n+ common == true\n+ end\n+\n def context_options\n options_list.select(&:context?)\n end\ndiff --git a/website/src/components/VectorComponents/index.js b/website/src/components/VectorComponents/index.js\nindex b6c5c13..d3c9adf 100644\n--- a/website/src/components/VectorComponents/index.js\n+++ b/website/src/components/VectorComponents/index.js\n@@ -154,7 +154,7 @@ function VectorComponents(props) {\n //\n \n const [onlyAtLeastOnce, setOnlyAtLeastOnce] = useState(queryObj['at-least-once'] == 'true');\n- const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['providers']));\n+ const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['functions']));\n const [onlyLog, setOnlyLog] = useState(queryObj['log'] == 'true');\n const [onlyMetric, setOnlyMetric] = useState(queryObj['metric'] == 'true');\n const [onlyProductionReady, setOnlyProductionReady] = useState(queryObj['prod-ready'] == 'true');\n"]
5
["00be00f2dd0ba7e4bfa4f5dfb74135761f5f86ec", "33c25b2f59c931a7f4af994365522221a7821dca", "fb3a231b29bc8bff9270b99dd4aff9dad599f21f", "4ab28fc2e63e975a0c77e18ae644f34fa5f8771a", "662c5d1346ea2b01c0bc3c11c648cbdf92035fe2"]
["test", "fix", "ci", "build", "docs"]
simplify statement,fetch git history,Downgrade @azure/* deps for Node.js 10 compatibility,update the formatting for python integration example,add !important to override paragraphs in items
["diff --git a/src/Object/Merge.ts b/src/Object/Merge.ts\nindex 1f48efb..06caad1 100644\n--- a/src/Object/Merge.ts\n+++ b/src/Object/Merge.ts\n@@ -96,9 +96,11 @@ type ChooseMergeDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _MergeDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? MergeProp<O, O1, K, OOK, style>\n- : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? MergeProp<O, O1, K, OOK, style>\n+ : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\ndiff --git a/src/Object/Patch.ts b/src/Object/Patch.ts\nindex 2d73784..2c8bd42 100644\n--- a/src/Object/Patch.ts\n+++ b/src/Object/Patch.ts\n@@ -89,9 +89,11 @@ type ChoosePatchDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _PatchDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? PatchProp<O, O1, K, OOK>\n- : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? PatchProp<O, O1, K, OOK>\n+ : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\n", "diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex 6726e35..9114eeb 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -172,6 +172,8 @@ jobs:\n \n steps:\n - uses: actions/checkout@v3\n+ with:\n+ fetch-depth: 0\n - run: corepack enable\n - uses: actions/setup-node@v3\n with:\ndiff --git a/scripts/bump-edge.ts b/scripts/bump-edge.ts\nindex aa33d76..1d1cbc9 100644\n--- a/scripts/bump-edge.ts\n+++ b/scripts/bump-edge.ts\n@@ -18,7 +18,7 @@ async function main () {\n \n const config = await loadChangelogConfig(process.cwd())\n \n- const latestTag = execaSync('git', ['describe', '--tags', '--abbrev=0', 'main']).stdout\n+ const latestTag = execaSync('git', ['describe', '--tags', '--abbrev=0']).stdout\n \n const commits = await getGitDiff(latestTag)\n const bumpType = determineSemverChange(parseCommits(commits, config), config)\n", "diff --git a/package.json b/package.json\nindex 911f8cd..ac29f54 100644\n--- a/package.json\n+++ b/package.json\n@@ -79,7 +79,13 @@\n \"resolutions\": {\n \"@types/ramda\": \"0.27.40\",\n \"rc-tree\": \"4.1.5\",\n+ \"@azure/storage-blob\": \"12.7.0\",\n+ \"@azure/core-paging\": \"1.1.3\",\n+ \"@azure/logger\": \"1.0.0\",\n \"@azure/core-auth\": \"1.2.0\",\n+ \"@azure/core-lro\": \"1.0.5\",\n+ \"@azure/core-tracing\": \"1.0.0-preview.10\",\n+ \"@azure/core-http\": \"1.2.6\",\n \"testcontainers\": \"7.12.1\"\n },\n \"license\": \"MIT\"\ndiff --git a/yarn.lock b/yarn.lock\nindex 5019f68..99235b5 100644\n--- a/yarn.lock\n+++ b/yarn.lock\n@@ -1144,19 +1144,19 @@\n \"@azure/abort-controller\" \"^1.0.0\"\n tslib \"^2.0.0\"\n \n-\"@azure/core-http@^2.0.0\":\n- version \"2.2.2\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-http/-/core-http-2.2.2.tgz#573798f087d808d39aa71fd7c52b8d7b89f440da\"\n- integrity sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==\n+\"@azure/[email protected]\", \"@azure/core-http@^1.2.0\", \"@azure/core-http@^2.0.0\":\n+ version \"1.2.6\"\n+ resolved 
\"https://registry.yarnpkg.com/@azure/core-http/-/core-http-1.2.6.tgz#9cd508418572d2062fd3175274219438772bdb65\"\n+ integrity sha512-odtH7UMKtekc5YQ86xg9GlVHNXR6pq2JgJ5FBo7/jbOjNGdBqcrIVrZx2bevXVJz/uUTSx6vUf62gzTXTfqYSQ==\n dependencies:\n \"@azure/abort-controller\" \"^1.0.0\"\n \"@azure/core-asynciterator-polyfill\" \"^1.0.0\"\n \"@azure/core-auth\" \"^1.3.0\"\n- \"@azure/core-tracing\" \"1.0.0-preview.13\"\n+ \"@azure/core-tracing\" \"1.0.0-preview.11\"\n \"@azure/logger\" \"^1.0.0\"\n \"@types/node-fetch\" \"^2.5.0\"\n- \"@types/tunnel\" \"^0.0.3\"\n- form-data \"^4.0.0\"\n+ \"@types/tunnel\" \"^0.0.1\"\n+ form-data \"^3.0.0\"\n node-fetch \"^2.6.0\"\n process \"^0.11.10\"\n tough-cookie \"^4.0.0\"\n@@ -1165,38 +1165,39 @@\n uuid \"^8.3.0\"\n xml2js \"^0.4.19\"\n \n-\"@azure/core-lro@^2.2.0\":\n- version \"2.2.1\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-lro/-/core-lro-2.2.1.tgz#5527b41037c658d3aefc19d68633e51e53d6e6a3\"\n- integrity sha512-HE6PBl+mlKa0eBsLwusHqAqjLc5n9ByxeDo3Hz4kF3B1hqHvRkBr4oMgoT6tX7Hc3q97KfDctDUon7EhvoeHPA==\n+\"@azure/[email protected]\", \"@azure/core-lro@^2.0.0\":\n+ version \"1.0.5\"\n+ resolved \"https://registry.yarnpkg.com/@azure/core-lro/-/core-lro-1.0.5.tgz#856a2cb6a9bec739ee9cde33a27cc28f81ac0522\"\n+ integrity sha512-0EFCFZxARrIoLWMIRt4vuqconRVIO2Iin7nFBfJiYCCbKp5eEmxutNk8uqudPmG0XFl5YqlVh68/al/vbE5OOg==\n dependencies:\n \"@azure/abort-controller\" \"^1.0.0\"\n- \"@azure/core-tracing\" \"1.0.0-preview.13\"\n- \"@azure/logger\" \"^1.0.0\"\n- tslib \"^2.2.0\"\n+ \"@azure/core-http\" \"^1.2.0\"\n+ \"@azure/core-tracing\" \"1.0.0-preview.11\"\n+ events \"^3.0.0\"\n+ tslib \"^2.0.0\"\n \n-\"@azure/core-paging@^1.1.1\":\n- version \"1.2.0\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-paging/-/core-paging-1.2.0.tgz#3754da429e8687bdc3613c750e79a564582e802b\"\n- integrity sha512-ZX1bCjm/MjKPCN6kQD/9GJErYSoKA8YWp6YWoo5EIzcTWlSBLXu3gNaBTUl8usGl+UShiKo7b4Gdy1NSTIlpZg==\n+\"@azure/[email protected]\", \"@azure/core-paging@^1.1.1\":\n+ version \"1.1.3\"\n+ resolved \"https://registry.yarnpkg.com/@azure/core-paging/-/core-paging-1.1.3.tgz#3587c9898a0530cacb64bab216d7318468aa5efc\"\n+ integrity sha512-his7Ah40ThEYORSpIAwuh6B8wkGwO/zG7gqVtmSE4WAJ46e36zUDXTKReUCLBDc6HmjjApQQxxcRFy5FruG79A==\n dependencies:\n \"@azure/core-asynciterator-polyfill\" \"^1.0.0\"\n- tslib \"^2.2.0\"\n \n-\"@azure/[email protected]\":\n- version \"1.0.0-preview.13\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz#55883d40ae2042f6f1e12b17dd0c0d34c536d644\"\n- integrity sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==\n+\"@azure/[email protected]\", \"@azure/[email protected]\", \"@azure/[email protected]\":\n+ version \"1.0.0-preview.10\"\n+ resolved \"https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.10.tgz#e7060272145dddad4486765030d1b037cd52a8ea\"\n+ integrity sha512-iIwjtMwQnsxB7cYkugMx+s4W1nfy3+pT/ceo+uW1fv4YDgYe84nh+QP0fEC9IH/3UATLSWbIBemdMHzk2APUrw==\n dependencies:\n- \"@opentelemetry/api\" \"^1.0.1\"\n- tslib \"^2.2.0\"\n+ \"@opencensus/web-types\" \"0.0.7\"\n+ \"@opentelemetry/api\" \"^0.10.2\"\n+ tslib \"^2.0.0\"\n \n-\"@azure/logger@^1.0.0\":\n- version \"1.0.3\"\n- resolved \"https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.3.tgz#6e36704aa51be7d4a1bae24731ea580836293c96\"\n- integrity sha512-aK4s3Xxjrx3daZr3VylxejK3vG5ExXck5WOHDJ8in/k9AqlfIyFMMT1uG7u8mNjX+QRILTIn0/Xgschfh/dQ9g==\n+\"@azure/[email protected]\", 
\"@azure/logger@^1.0.0\":\n+ version \"1.0.0\"\n+ resolved \"https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.0.tgz#48b371dfb34288c8797e5c104f6c4fb45bf1772c\"\n+ integrity sha512-g2qLDgvmhyIxR3JVS8N67CyIOeFRKQlX/llxYJQr1OSGQqM3HTpVP8MjmjcEKbL/OIt2N9C9UFaNQuKOw1laOA==\n dependencies:\n- tslib \"^2.2.0\"\n+ tslib \"^1.9.3\"\n \n \"@azure/ms-rest-azure-env@^2.0.0\":\n version \"2.0.0\"\n@@ -1227,19 +1228,19 @@\n \"@azure/ms-rest-js\" \"^2.0.4\"\n adal-node \"^0.2.2\"\n \n-\"@azure/storage-blob@^12.5.0\":\n- version \"12.8.0\"\n- resolved \"https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.8.0.tgz#97b7ecc6c7b17bcbaf0281c79c16af6f512d6130\"\n- integrity sha512-c8+Wz19xauW0bGkTCoqZH4dYfbtBniPiGiRQOn1ca6G5jsjr4azwaTk9gwjVY8r3vY2Taf95eivLzipfIfiS4A==\n+\"@azure/[email protected]\", \"@azure/storage-blob@^12.5.0\":\n+ version \"12.7.0\"\n+ resolved \"https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.7.0.tgz#f17f278000a46bca516e5864d846cd8fa57d6d7d\"\n+ integrity sha512-7YEWEx03Us/YBxthzBv788R7jokwpCD5KcIsvtE5xRaijNX9o80KXpabhEwLR9DD9nmt/AlU/c1R+aXydgCduQ==\n dependencies:\n \"@azure/abort-controller\" \"^1.0.0\"\n \"@azure/core-http\" \"^2.0.0\"\n- \"@azure/core-lro\" \"^2.2.0\"\n+ \"@azure/core-lro\" \"^2.0.0\"\n \"@azure/core-paging\" \"^1.1.1\"\n \"@azure/core-tracing\" \"1.0.0-preview.13\"\n \"@azure/logger\" \"^1.0.0\"\n events \"^3.0.0\"\n- tslib \"^2.2.0\"\n+ tslib \"^2.0.0\"\n \n \"@babel/cli@^7.5.5\":\n version \"7.16.0\"\n@@ -2888,9 +2889,9 @@\n integrity sha512-82cpyJyKRoQoRi+14ibCeGPu0CwypgtBAdBhq1WfvagpCZNKqwXbKwXllYSMG91DhmG4jt9gN8eP6lGOtozuaw==\n \n \"@google-cloud/bigquery@^5.6.0\":\n- version \"5.9.1\"\n- resolved \"https://registry.yarnpkg.com/@google-cloud/bigquery/-/bigquery-5.9.1.tgz#96cee86fa0caef4a7e1470efde9295bc09f5981f\"\n- integrity sha512-80pMzhAC299CSiXW9TvR8AARLaPRDeQg8pSAvrVcLXcUkx1hWvVx2m94nBZ4KUoZb4LVWIHHYhvFB6XvIcxqjw==\n+ version \"5.9.2\"\n+ resolved \"https://registry.yarnpkg.com/@google-cloud/bigquery/-/bigquery-5.9.2.tgz#d53eac984fdd256d31be490762157e5f6c5b82c3\"\n+ integrity sha512-lJiMsSekcnhrzzR9e48yx8iOx+ElP3r/wOoionXL6eDPbA41RgP12if5NmMqHZzfWdKlWV2plspEPrbjhJAzCw==\n dependencies:\n \"@google-cloud/common\" \"^3.1.0\"\n \"@google-cloud/paginator\" \"^3.0.0\"\n@@ -4831,11 +4832,28 @@\n resolved \"https://registry.yarnpkg.com/@oozcitak/util/-/util-8.3.8.tgz#10f65fe1891fd8cde4957360835e78fd1936bfdd\"\n integrity sha512-T8TbSnGsxo6TDBJx/Sgv/BlVJL3tshxZP7Aq5R1mSnM5OcHY2dQaxLMu2+E8u3gN0MLOzdjurqN4ZRVuzQycOQ==\n \n-\"@opentelemetry/api@^1.0.0\", \"@opentelemetry/api@^1.0.1\":\n+\"@opencensus/[email protected]\":\n+ version \"0.0.7\"\n+ resolved \"https://registry.yarnpkg.com/@opencensus/web-types/-/web-types-0.0.7.tgz#4426de1fe5aa8f624db395d2152b902874f0570a\"\n+ integrity sha512-xB+w7ZDAu3YBzqH44rCmG9/RlrOmFuDPt/bpf17eJr8eZSrLt7nc7LnWdxM9Mmoj/YKMHpxRg28txu3TcpiL+g==\n+\n+\"@opentelemetry/api@^0.10.2\":\n+ version \"0.10.2\"\n+ resolved \"https://registry.yarnpkg.com/@opentelemetry/api/-/api-0.10.2.tgz#9647b881f3e1654089ff7ea59d587b2d35060654\"\n+ integrity sha512-GtpMGd6vkzDMYcpu2t9LlhEgMy/SzBwRnz48EejlRArYqZzqSzAsKmegUK7zHgl+EOIaK9mKHhnRaQu3qw20cA==\n+ dependencies:\n+ \"@opentelemetry/context-base\" \"^0.10.2\"\n+\n+\"@opentelemetry/api@^1.0.0\":\n version \"1.0.3\"\n resolved \"https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.0.3.tgz#13a12ae9e05c2a782f7b5e84c3cbfda4225eaf80\"\n integrity sha512-puWxACExDe9nxbBB3lOymQFrLYml2dVOrd7USiVRnSbgXE+KwBu+HxFvxrzfqsiSda9IWsXJG1ef7C1O2/GmKQ==\n 
\n+\"@opentelemetry/context-base@^0.10.2\":\n+ version \"0.10.2\"\n+ resolved \"https://registry.yarnpkg.com/@opentelemetry/context-base/-/context-base-0.10.2.tgz#55bea904b2b91aa8a8675df9eaba5961bddb1def\"\n+ integrity sha512-hZNKjKOYsckoOEgBziGMnBcX0M7EtstnCmwz5jZUOUYwlZ+/xxX6z3jPu1XVO2Jivk0eLfuP9GP+vFD49CMetw==\n+\n \"@opentelemetry/semantic-conventions@^0.24.0\":\n version \"0.24.0\"\n resolved \"https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-0.24.0.tgz#1028ef0e0923b24916158d80d2ddfd67ea8b6740\"\n@@ -5564,9 +5582,9 @@\n integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4=\n \n \"@types/jsonwebtoken@^8.5.0\":\n- version \"8.5.5\"\n- resolved \"https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-8.5.5.tgz#da5f2f4baee88f052ef3e4db4c1a0afb46cff22c\"\n- integrity sha512-OGqtHQ7N5/Ap/TUwO6IgHDuLiAoTmHhGpNvgkCm/F4N6pKzx/RBSfr2OXZSwC6vkfnsEdb6+7DNZVtiXiwdwFw==\n+ version \"8.5.6\"\n+ resolved \"https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-8.5.6.tgz#1913e5a61e70a192c5a444623da4901a7b1a9d42\"\n+ integrity sha512-+P3O/xC7nzVizIi5VbF34YtqSonFsdnbXBnWUCYRiKOi1f9gA4sEFvXkrGr/QVV23IbMYvcoerI7nnhDUiWXRQ==\n dependencies:\n \"@types/node\" \"*\"\n \n@@ -5753,18 +5771,18 @@\n \"@types/react\" \"*\"\n \n \"@types/react@*\", \"@types/react@^17.0.3\":\n- version \"17.0.34\"\n- resolved \"https://registry.yarnpkg.com/@types/react/-/react-17.0.34.tgz#797b66d359b692e3f19991b6b07e4b0c706c0102\"\n- integrity sha512-46FEGrMjc2+8XhHXILr+3+/sTe3OfzSPU9YGKILLrUYbQ1CLQC9Daqo1KzENGXAWwrFwiY0l4ZbF20gRvgpWTg==\n+ version \"17.0.35\"\n+ resolved \"https://registry.yarnpkg.com/@types/react/-/react-17.0.35.tgz#217164cf830267d56cd1aec09dcf25a541eedd4c\"\n+ integrity sha512-r3C8/TJuri/SLZiiwwxQoLAoavaczARfT9up9b4Jr65+ErAUX3MIkU0oMOQnrpfgHme8zIqZLX7O5nnjm5Wayw==\n dependencies:\n \"@types/prop-types\" \"*\"\n \"@types/scheduler\" \"*\"\n csstype \"^3.0.2\"\n \n \"@types/react@^16.9.41\":\n- version \"16.14.20\"\n- resolved \"https://registry.yarnpkg.com/@types/react/-/react-16.14.20.tgz#ff6e932ad71d92c27590e4a8667c7a53a7d0baad\"\n- integrity sha512-SV7TaVc8e9E/5Xuv6TIyJ5VhQpZoVFJqX6IZgj5HZoFCtIDCArE3qXkcHlc6O/Ud4UwcMoX+tlvDA95YrKdLgA==\n+ version \"16.14.21\"\n+ resolved \"https://registry.yarnpkg.com/@types/react/-/react-16.14.21.tgz#35199b21a278355ec7a3c40003bd6a334bd4ae4a\"\n+ integrity sha512-rY4DzPKK/4aohyWiDRHS2fotN5rhBSK6/rz1X37KzNna9HJyqtaGAbq9fVttrEPWF5ywpfIP1ITL8Xi2QZn6Eg==\n dependencies:\n \"@types/prop-types\" \"*\"\n \"@types/scheduler\" \"*\"\n@@ -5950,10 +5968,10 @@\n resolved \"https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.1.tgz#8f80dd965ad81f3e1bc26d6f5c727e132721ff40\"\n integrity sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg==\n \n-\"@types/tunnel@^0.0.3\":\n- version \"0.0.3\"\n- resolved \"https://registry.yarnpkg.com/@types/tunnel/-/tunnel-0.0.3.tgz#f109e730b072b3136347561fc558c9358bb8c6e9\"\n- integrity sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==\n+\"@types/tunnel@^0.0.1\":\n+ version \"0.0.1\"\n+ resolved \"https://registry.yarnpkg.com/@types/tunnel/-/tunnel-0.0.1.tgz#0d72774768b73df26f25df9184273a42da72b19c\"\n+ integrity sha512-AOqu6bQu5MSWwYvehMXLukFHnupHrpZ8nvgae5Ggie9UwzDR1CCwoXgSSWNZJuyOlCdfdsWMA5F2LlmvyoTv8A==\n dependencies:\n \"@types/node\" \"*\"\n \n@@ -5999,9 +6017,9 @@\n source-map \"^0.6.1\"\n \n \"@types/webpack@^4\", \"@types/webpack@^4.0.0\", \"@types/webpack@^4.41.8\":\n- version \"4.41.31\"\n- 
resolved \"https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.31.tgz#c35f252a3559ddf9c85c0d8b0b42019025e581aa\"\n- integrity sha512-/i0J7sepXFIp1ZT7FjUGi1eXMCg8HCCzLJEQkKsOtbJFontsJLolBcDC+3qxn5pPwiCt1G0ZdRmYRzNBtvpuGQ==\n+ version \"4.41.32\"\n+ resolved \"https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.32.tgz#a7bab03b72904070162b2f169415492209e94212\"\n+ integrity sha512-cb+0ioil/7oz5//7tZUSwbrSAN/NWHrQylz5cW8G0dWTcF/g+/dSdMlKVZspBYuMAN1+WnwHrkxiRrLcwd0Heg==\n dependencies:\n \"@types/node\" \"*\"\n \"@types/tapable\" \"^1\"\n@@ -7624,9 +7642,9 @@ autoprefixer@^9.6.1, autoprefixer@^9.6.5, autoprefixer@^9.8.6:\n postcss-value-parser \"^4.1.0\"\n \n aws-sdk@^2.404.0, aws-sdk@^2.787.0, aws-sdk@^2.819.0, aws-sdk@^2.878.0:\n- version \"2.1028.0\"\n- resolved \"https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1028.0.tgz#ce076076174afa9bd311406b8186ea90163e3331\"\n- integrity sha512-OmR0NcpU8zsDcUOZhM+eZ6CzlUFtuaEuRyjm6mxDO0KI7lJAp7/NzB6tcellRrgWxL+NO7b5TSxi+m28qu5ocQ==\n+ version \"2.1029.0\"\n+ resolved \"https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1029.0.tgz#702d4d6092adcf0ceaf37ae0da6fee07a71f39dd\"\n+ integrity sha512-nCmaMPkJr3EATXaeqR3JeNC0GTDH2lJZ3Xq/ZCAW+yrfaPQWv8HqJJHBCNGtmk3FmcCoxc7ed/gEB8XSl0tocA==\n dependencies:\n buffer \"4.9.2\"\n events \"1.1.1\"\n@@ -8596,11 +8614,16 @@ [email protected]:\n resolved \"https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048\"\n integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=\n \[email protected], bytes@^3.1.0:\[email protected]:\n version \"3.1.0\"\n resolved \"https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6\"\n integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==\n \n+bytes@^3.1.0:\n+ version \"3.1.1\"\n+ resolved \"https://registry.yarnpkg.com/bytes/-/bytes-3.1.1.tgz#3f018291cb4cbad9accb6e6970bca9c8889e879a\"\n+ integrity sha512-dWe4nWO/ruEOY7HkUJ5gFt1DCFV9zPRoJr8pV0/ASQermOZjtq8jMjOprC0Kd10GLN+l7xaUPvxzJFWtxGu8Fg==\n+\n [email protected]:\n version \"15.0.3\"\n resolved \"https://registry.yarnpkg.com/cacache/-/cacache-15.0.3.tgz#2225c2d1dd8e872339950d6a39c051e0e9334392\"\n@@ -11359,9 +11382,9 @@ ejs@^2.6.1:\n integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==\n \n electron-to-chromium@^1.3.564, electron-to-chromium@^1.3.896:\n- version \"1.3.896\"\n- resolved \"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.896.tgz#4a94efe4870b1687eafd5c378198a49da06e8a1b\"\n- integrity sha512-NcGkBVXePiuUrPLV8IxP43n1EOtdg+dudVjrfVEUd/bOqpQUFZ2diL5PPYzbgEhZFEltdXV3AcyKwGnEQ5lhMA==\n+ version \"1.3.899\"\n+ resolved \"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.899.tgz#4d7d040e73def3d5f5bd6b8a21049025dce6fce0\"\n+ integrity sha512-w16Dtd2zl7VZ4N4Db+FIa7n36sgPGCKjrKvUUmp5ialsikvcQLjcJR9RWnlYNxIyEHLdHaoIZEqKsPxU9MdyBg==\n \n elegant-spinner@^1.0.1:\n version \"1.0.1\"\n@@ -12887,15 +12910,6 @@ form-data@^3.0.0:\n combined-stream \"^1.0.8\"\n mime-types \"^2.1.12\"\n \n-form-data@^4.0.0:\n- version \"4.0.0\"\n- resolved \"https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452\"\n- integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==\n- dependencies:\n- asynckit \"^0.4.0\"\n- combined-stream \"^1.0.8\"\n- mime-types \"^2.1.12\"\n-\n form-data@~2.3.2:\n version \"2.3.3\"\n resolved 
\"https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6\"\n@@ -21198,11 +21212,13 @@ proto-list@~1.2.1:\n integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk=\n \n proto3-json-serializer@^0.1.5:\n- version \"0.1.5\"\n- resolved \"https://registry.yarnpkg.com/proto3-json-serializer/-/proto3-json-serializer-0.1.5.tgz#c619769a59dc7fd8adf4e6c5060b9bf3039c8304\"\n- integrity sha512-G395jcZkgNXNeS+6FGqd09TsXeoCs9wmBWByDiwFy7Yd7HD8pyfyvf6q+rGh7PhT4AshRpG4NowzoKYUtkNjKg==\n+ version \"0.1.6\"\n+ resolved \"https://registry.yarnpkg.com/proto3-json-serializer/-/proto3-json-serializer-0.1.6.tgz#67cf3b8d5f4c8bebfc410698ad3b1ed64da39c7b\"\n+ integrity sha512-tGbV6m6Kad8NqxMh5hw87euPS0YoZSAOIfvR01zYkQV8Gpx1V/8yU/0gCKCvfCkhAJsjvzzhnnsdQxA1w7PSog==\n+ dependencies:\n+ protobufjs \"^6.11.2\"\n \[email protected], protobufjs@^6.10.0:\[email protected], protobufjs@^6.10.0, protobufjs@^6.11.2:\n version \"6.11.2\"\n resolved \"https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.2.tgz#de39fabd4ed32beaa08e9bb1e30d08544c1edf8b\"\n integrity sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==\n", "diff --git a/website/docs/integration/python.md b/website/docs/integration/python.md\nindex 064cae3..b6b720d 100644\n--- a/website/docs/integration/python.md\n+++ b/website/docs/integration/python.md\n@@ -13,6 +13,7 @@ header = \"All notable changes to this project will be documented in this file.\"\n body = \"...\"\n footer = \"<!-- generated by git-cliff -->\"\n # see [changelog] section for more keys\n+\n [tool.git-cliff.git]\n conventional_commits = true\n commit_parsers = []\n", "diff --git a/packages/core/src/components/text/text.ios.scss b/packages/core/src/components/text/text.ios.scss\nindex a3c58e2..2a020ab 100644\n--- a/packages/core/src/components/text/text.ios.scss\n+++ b/packages/core/src/components/text/text.ios.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-ios) {\n \n .text-ios-#{$color-name},\n- .text-ios-#{$color-name} a {\n- color: $color-base;\n+ .text-ios-#{$color-name} a,\n+ .text-ios-#{$color-name} p {\n+ color: $color-base !important\n }\n \n }\ndiff --git a/packages/core/src/components/text/text.md.scss b/packages/core/src/components/text/text.md.scss\nindex b397acb..050af1a 100644\n--- a/packages/core/src/components/text/text.md.scss\n+++ b/packages/core/src/components/text/text.md.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-md) {\n \n .text-md-#{$color-name},\n- .text-md-#{$color-name} a {\n- color: $color-base;\n+ .text-md-#{$color-name} a,\n+ .text-md-#{$color-name} p {\n+ color: $color-base !important;\n }\n \n }\n"]
5
["f86944ff00b970d7e2da48abbff43e58bdf29b99", "e11d55a4922978b89a2c50bf577124b09449e89c", "5ef4fd29a4cef69c6c348dd25156934df041f183", "3ee672483790ec71c700907a6e93af4698492026", "7ab363f7ba2807b3eb9895e47f4fcd058f43ae5e"]
["refactor", "ci", "build", "docs", "test"]
fix monorepo.dir prop Signed-off-by: Carlos Alexandro Becker <[email protected]>,include `token_version` value in jwt payload; use a generic function to populate jwt token to avoid repetition re #2361 Signed-off-by: Pranav C <[email protected]>,increase timeout of multiregion failover test Due to the nature of the test, restarts and failovers can take long. If the recovery takes longer than 15m, then the test will fail unnecessarily. Since we are not really testing for how fast it can recover, it is ok to increase the maxInstanceDuration.,fix test Write another record so the commit position is updated and we can take a snapshot,update version (v0.6.18)
["diff --git a/www/docs/customization/monorepo.md b/www/docs/customization/monorepo.md\nindex 6d0e857..e45490f 100644\n--- a/www/docs/customization/monorepo.md\n+++ b/www/docs/customization/monorepo.md\n@@ -18,7 +18,7 @@ project_name: subproj1\n \n monorepo:\n tag_prefix: subproject1/\n- folder: subproj1\n+ dir: subproj1\n ```\n \n Then, you can release with (from the project's root directory):\n@@ -30,11 +30,11 @@ goreleaser release --rm-dist -f ./subproj1/.goreleaser.yml\n Then, the following is different from a \"regular\" run:\n \n - GoReleaser will then look if current commit has a tag prefixed with `subproject1`, and also the previous tag with the same prefix;\n-- Changelog will include only commits that contain changes to files within the `subproj1` folder;\n+- Changelog will include only commits that contain changes to files within the `subproj1` directory;\n - Release name gets prefixed with `{{ .ProjectName }} ` if empty;\n-- All build's `dir` setting get set to `monorepo.folder` if empty;\n+- All build's `dir` setting get set to `monorepo.dir` if empty;\n - if yours is not, you might want to change that manually;\n-- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.folder`;\n+- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.dir`;\n - On templates, `{{.PrefixedTag}}` will be `monorepo.prefix/tag` (aka the actual tag name), and `{{.Tag}}` has the prefix stripped;\n \n The rest of the release process should work as usual.\n", "diff --git a/packages/nocodb/src/lib/meta/api/sync/importApis.ts b/packages/nocodb/src/lib/meta/api/sync/importApis.ts\nindex 2facc67..1f8d137 100644\n--- a/packages/nocodb/src/lib/meta/api/sync/importApis.ts\n+++ b/packages/nocodb/src/lib/meta/api/sync/importApis.ts\n@@ -7,7 +7,7 @@ import NocoJobs from '../../../jobs/NocoJobs';\n import job, { AirtableSyncConfig } from './helpers/job';\n import SyncSource from '../../../models/SyncSource';\n import Noco from '../../../Noco';\n-import * as jwt from 'jsonwebtoken';\n+import { genJwt } from '../userApi/helpers';\n const AIRTABLE_IMPORT_JOB = 'AIRTABLE_IMPORT_JOB';\n const AIRTABLE_PROGRESS_JOB = 'AIRTABLE_PROGRESS_JOB';\n \n@@ -76,18 +76,7 @@ export default (router: Router, clients: { [id: string]: Socket }) => {\n const syncSource = await SyncSource.get(req.params.syncId);\n \n const user = await syncSource.getUser();\n- const token = jwt.sign(\n- {\n- email: user.email,\n- firstname: user.firstname,\n- lastname: user.lastname,\n- id: user.id,\n- roles: user.roles\n- },\n-\n- Noco.getConfig().auth.jwt.secret,\n- Noco.getConfig().auth.jwt.options\n- );\n+ const token = genJwt(user, Noco.getConfig());\n \n // Treat default baseUrl as siteUrl from req object\n let baseURL = (req as any).ncSiteUrl;\ndiff --git a/packages/nocodb/src/lib/meta/api/userApi/helpers.ts b/packages/nocodb/src/lib/meta/api/userApi/helpers.ts\nnew file mode 100644\nindex 0000000..48f7c11\n--- /dev/null\n+++ b/packages/nocodb/src/lib/meta/api/userApi/helpers.ts\n@@ -0,0 +1,18 @@\n+import * as jwt from 'jsonwebtoken';\n+import User from '../../../models/User';\n+import { NcConfig } from '../../../../interface/config';\n+\n+export function genJwt(user: User, config: NcConfig) {\n+ return jwt.sign(\n+ {\n+ email: user.email,\n+ firstname: user.firstname,\n+ lastname: user.lastname,\n+ id: user.id,\n+ roles: user.roles,\n+ token_version: user.token_version\n+ },\n+ config.auth.jwt.secret,\n+ config.auth.jwt.options\n+ );\n+}\ndiff --git 
a/packages/nocodb/src/lib/meta/api/userApi/userApis.ts b/packages/nocodb/src/lib/meta/api/userApi/userApis.ts\nindex 2b8384a..7a6fa31 100644\n--- a/packages/nocodb/src/lib/meta/api/userApi/userApis.ts\n+++ b/packages/nocodb/src/lib/meta/api/userApi/userApis.ts\n@@ -10,7 +10,6 @@ import User from '../../../models/User';\n import { Tele } from 'nc-help';\n \n const { v4: uuidv4 } = require('uuid');\n-import * as jwt from 'jsonwebtoken';\n import Audit from '../../../models/Audit';\n import crypto from 'crypto';\n import NcPluginMgrv2 from '../../helpers/NcPluginMgrv2';\n@@ -20,6 +19,7 @@ import extractProjectIdAndAuthenticate from '../../helpers/extractProjectIdAndAu\n import ncMetaAclMw from '../../helpers/ncMetaAclMw';\n import { MetaTable } from '../../../utils/globals';\n import Noco from '../../../Noco';\n+import { genJwt } from './helpers';\n \n export async function signup(req: Request, res: Response<TableType>) {\n const {\n@@ -147,18 +147,7 @@ export async function signup(req: Request, res: Response<TableType>) {\n });\n \n res.json({\n- token: jwt.sign(\n- {\n- email: user.email,\n- firstname: user.firstname,\n- lastname: user.lastname,\n- id: user.id,\n- roles: user.roles,\n- token_version: user.token_version\n- },\n- Noco.getConfig().auth.jwt.secret,\n- Noco.getConfig().auth.jwt.options\n- )\n+ token: genJwt(user, Noco.getConfig())\n } as any);\n }\n \n@@ -205,19 +194,7 @@ async function successfulSignIn({\n });\n \n res.json({\n- token: jwt.sign(\n- {\n- email: user.email,\n- firstname: user.firstname,\n- lastname: user.lastname,\n- id: user.id,\n- roles: user.roles,\n- token_version\n- },\n-\n- Noco.getConfig().auth.jwt.secret,\n- Noco.getConfig().auth.jwt.options\n- )\n+ token: genJwt(user, Noco.getConfig())\n } as any);\n } catch (e) {\n console.log(e);\n@@ -477,17 +454,7 @@ async function refreshToken(req, res): Promise<any> {\n setTokenCookie(res, refreshToken);\n \n res.json({\n- token: jwt.sign(\n- {\n- email: user.email,\n- firstname: user.firstname,\n- lastname: user.lastname,\n- id: user.id,\n- roles: user.roles\n- },\n- Noco.getConfig().auth.jwt.secret,\n- Noco.getConfig().auth.jwt.options\n- )\n+ token: genJwt(user, Noco.getConfig())\n } as any);\n } catch (e) {\n return res.status(400).json({ msg: e.message });\n", "diff --git a/.github/workflows/e2e-testbench.yaml b/.github/workflows/e2e-testbench.yaml\nindex 708f97f..fd0b918 100644\n--- a/.github/workflows/e2e-testbench.yaml\n+++ b/.github/workflows/e2e-testbench.yaml\n@@ -31,6 +31,11 @@ on:\n default: null\n required: false\n type: string\n+ maxInstanceDuration:\n+ description: 'If an instance takes longer than the given duration to complete, test will fail.'\n+ default: '15m'\n+ required: false\n+ type: string\n \n workflow_call:\n inputs:\n@@ -59,6 +64,11 @@ on:\n default: null\n required: false\n type: string\n+ maxInstanceDuration:\n+ description: 'If an instance takes longer than the given duration to complete, test will fail.'\n+ default: '15m'\n+ required: false\n+ type: string\n \n jobs:\n e2e:\n@@ -81,7 +91,7 @@ jobs:\n {\n \\\"maxTestDuration\\\": \\\"${{ inputs.maxTestDuration || 'P5D' }}\\\",\n \\\"starter\\\": [ {\\\"rate\\\": 50, \\\"processId\\\": \\\"one-task-one-timer\\\" } ],\n- \\\"verifier\\\" : { \\\"maxInstanceDuration\\\" : \\\"15m\\\" },\n+ \\\"verifier\\\" : { \\\"maxInstanceDuration\\\" : \\\"${{ inputs.maxInstanceDuration }}\\\" },\n \\\"fault\\\": ${{ inputs.fault || 'null' }}\n }\n }\ndiff --git a/.github/workflows/weekly-e2e.yml b/.github/workflows/weekly-e2e.yml\nindex 
93aaeb5..4bd0afd 100644\n--- a/.github/workflows/weekly-e2e.yml\n+++ b/.github/workflows/weekly-e2e.yml\n@@ -31,4 +31,5 @@ jobs:\n maxTestDuration: P1D\n clusterPlan: Multiregion test simulation\n fault: \\\"2-region-dataloss-failover\\\"\n+ maxInstanceDuration: 40m\n secrets: inherit\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\nindex 24f1316..881c727 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n@@ -70,6 +70,14 @@ public class ReaderCloseTest {\n .getCluster()\n .getNodeId();\n clusteringRule.forceClusterToHaveNewLeader(followerId);\n+ // because of https://github.com/camunda-cloud/zeebe/issues/8329\n+ // we need to add another record so we can do a snapshot\n+ clientRule\n+ .getClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"test\")\n+ .correlationKey(\"test\")\n+ .send();\n \n // when\n clusteringRule.triggerAndWaitForSnapshots();\n@@ -78,6 +86,7 @@ public class ReaderCloseTest {\n for (final Broker broker : clusteringRule.getBrokers()) {\n assertThatFilesOfDeletedSegmentsDoesNotExist(broker);\n }\n+ assertThat(leaderId).isNotEqualTo(clusteringRule.getLeaderForPartition(1).getNodeId());\n }\n \n private void assertThatFilesOfDeletedSegmentsDoesNotExist(final Broker leader)\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex c32d8b4..599790e 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -125,7 +125,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -133,7 +133,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex baaa0ac..5082cd3 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n experimental = [\"erg_common/experimental\", \"erg_parser/experimental\", \"erg_compiler/experimental\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.30-nightly.2\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.18\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.18\", path = 
\"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.18\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.30\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 3efbf4e..9f902fa 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n"]
5
["9ed3c0c4a72af977fc9150512fb6538f20a94b22", "5ac169d840351733849207e082d2a65abf629c92", "ee824ddd71cbc4ccc26f7c6876d379c4927b79e6", "47df74d40becf915a9d89cdb887abd259b77def0", "bb3e3d9b96e435c3b92fc208bca93d1ad7e1ad50"]
["docs", "fix", "ci", "test", "build"]
add react ecosystem,support document.html,lint source on ci,add vue3 readme,move group logical op outside Signed-off-by: Pranav C <[email protected]>
["diff --git a/package.json b/package.json\nindex 1ba8c4f..d1de9a0 100644\n--- a/package.json\n+++ b/package.json\n@@ -36,14 +36,19 @@\n \"@types/node\": \"^9.3.0\",\n \"@types/react\": \"^16.0.34\",\n \"@types/react-dom\": \"^16.0.3\",\n+ \"@types/react-motion\": \"^0.0.25\",\n \"bootstrap-sass\": \"^3.3.7\",\n \"highcharts\": \"^6.0.4\",\n \"html2canvas\": \"^1.0.0-alpha.9\",\n+ \"immer\": \"^1.2.1\",\n \"lodash\": \"^4.17.4\",\n \"moment\": \"^2.20.1\",\n \"normalize.css\": \"^8.0.0\",\n- \"react\": \"^16.2.0\",\n- \"react-dom\": \"^16.2.0\",\n+ \"react\": \"^16.3.1\",\n+ \"react-dom\": \"^16.3.1\",\n+ \"react-motion\": \"^0.5.2\",\n+ \"react-redux\": \"^5.0.7\",\n+ \"redux\": \"^3.7.2\",\n \"rxjs\": \"^5.5.6\",\n \"vue\": \"^2.5.13\",\n \"vue-plugin-webextension-i18n\": \"^0.1.0\",\ndiff --git a/yarn.lock b/yarn.lock\nindex c8898d8..5d0fc9f 100644\n--- a/yarn.lock\n+++ b/yarn.lock\n@@ -187,6 +187,12 @@\n \"@types/node\" \"*\"\n \"@types/react\" \"*\"\n \n+\"@types/react-motion@^0.0.25\":\n+ version \"0.0.25\"\n+ resolved \"https://registry.npmjs.org/@types/react-motion/-/react-motion-0.0.25.tgz#2445745ee8e8e6149faa47a36ff6b0d4c21dbf94\"\n+ dependencies:\n+ \"@types/react\" \"*\"\n+\n \"@types/react@*\", \"@types/react@^16.0.34\":\n version \"16.0.40\"\n resolved \"https://registry.npmjs.org/@types/react/-/react-16.0.40.tgz#caabc2296886f40b67f6fc80f0f3464476461df9\"\n@@ -3837,6 +3843,10 @@ [email protected]:\n version \"4.2.1\"\n resolved \"https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz#9634502aa12c445dd5a7c5734b572bb8738aacbb\"\n \n+hoist-non-react-statics@^2.5.0:\n+ version \"2.5.0\"\n+ resolved \"https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.0.tgz#d2ca2dfc19c5a91c5a6615ce8e564ef0347e2a40\"\n+\n home-or-tmp@^2.0.0:\n version \"2.0.0\"\n resolved \"https://registry.npmjs.org/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8\"\n@@ -4004,6 +4014,10 @@ ignore@^3.3.5:\n version \"3.3.7\"\n resolved \"https://registry.npmjs.org/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021\"\n \n+immer@^1.2.1:\n+ version \"1.2.1\"\n+ resolved \"https://registry.npmjs.org/immer/-/immer-1.2.1.tgz#96e2ae29cdfc428f28120b832701931b92fa597c\"\n+\n import-local@^1.0.0:\n version \"1.0.0\"\n resolved \"https://registry.npmjs.org/import-local/-/import-local-1.0.0.tgz#5e4ffdc03f4fe6c009c6729beb29631c2f8227bc\"\n@@ -4104,7 +4118,7 @@ interpret@^1.0.0:\n version \"1.1.0\"\n resolved \"https://registry.npmjs.org/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614\"\n \n-invariant@^2.2.2:\n+invariant@^2.0.0, invariant@^2.2.2:\n version \"2.2.4\"\n resolved \"https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6\"\n dependencies:\n@@ -5040,6 +5054,10 @@ locate-path@^2.0.0:\n p-locate \"^2.0.0\"\n path-exists \"^3.0.0\"\n \n+lodash-es@^4.17.5, lodash-es@^4.2.1:\n+ version \"4.17.8\"\n+ resolved \"https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.8.tgz#6fa8c8c5d337481df0bdf1c0d899d42473121e45\"\n+\n lodash._reinterpolate@~3.0.0:\n version \"3.0.0\"\n resolved \"https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d\"\n@@ -5149,7 +5167,7 @@ [email protected]:\n version \"4.17.2\"\n resolved \"https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz#34a3055babe04ce42467b607d700072c7ff6bf42\"\n \[email protected], lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.16.3, lodash@^4.17.2, lodash@^4.17.3, 
lodash@^4.17.4, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@~4.17.4:\[email protected], lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.16.3, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@~4.17.4:\n version \"4.17.5\"\n resolved \"https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511\"\n \n@@ -6467,7 +6485,7 @@ promise@^7.1.1:\n dependencies:\n asap \"~2.0.3\"\n \n-prop-types@^15.6.0:\n+prop-types@^15.5.8, prop-types@^15.6.0:\n version \"15.6.1\"\n resolved \"https://registry.npmjs.org/prop-types/-/prop-types-15.6.1.tgz#36644453564255ddda391191fb3a125cbdf654ca\"\n dependencies:\n@@ -6574,7 +6592,7 @@ quick-lru@^1.0.0:\n version \"1.1.0\"\n resolved \"https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz#4360b17c61136ad38078397ff11416e186dcfbb8\"\n \[email protected]:\[email protected], raf@^3.1.0:\n version \"3.4.0\"\n resolved \"https://registry.npmjs.org/raf/-/raf-3.4.0.tgz#a28876881b4bc2ca9117d4138163ddb80f781575\"\n dependencies:\n@@ -6645,9 +6663,9 @@ react-dev-utils@^5.0.0:\n strip-ansi \"3.0.1\"\n text-table \"0.2.0\"\n \n-react-dom@^16.2.0:\n- version \"16.2.0\"\n- resolved \"https://registry.npmjs.org/react-dom/-/react-dom-16.2.0.tgz#69003178601c0ca19b709b33a83369fe6124c044\"\n+react-dom@^16.3.1:\n+ version \"16.3.1\"\n+ resolved \"https://registry.npmjs.org/react-dom/-/react-dom-16.3.1.tgz#6a3c90a4fb62f915bdbcf6204422d93a7d4ca573\"\n dependencies:\n fbjs \"^0.8.16\"\n loose-envify \"^1.1.0\"\n@@ -6658,9 +6676,28 @@ react-error-overlay@^4.0.0:\n version \"4.0.0\"\n resolved \"https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-4.0.0.tgz#d198408a85b4070937a98667f500c832f86bd5d4\"\n \n-react@^16.2.0:\n- version \"16.2.0\"\n- resolved \"https://registry.npmjs.org/react/-/react-16.2.0.tgz#a31bd2dab89bff65d42134fa187f24d054c273ba\"\n+react-motion@^0.5.2:\n+ version \"0.5.2\"\n+ resolved \"https://registry.npmjs.org/react-motion/-/react-motion-0.5.2.tgz#0dd3a69e411316567927917c6626551ba0607316\"\n+ dependencies:\n+ performance-now \"^0.2.0\"\n+ prop-types \"^15.5.8\"\n+ raf \"^3.1.0\"\n+\n+react-redux@^5.0.7:\n+ version \"5.0.7\"\n+ resolved \"https://registry.npmjs.org/react-redux/-/react-redux-5.0.7.tgz#0dc1076d9afb4670f993ffaef44b8f8c1155a4c8\"\n+ dependencies:\n+ hoist-non-react-statics \"^2.5.0\"\n+ invariant \"^2.0.0\"\n+ lodash \"^4.17.5\"\n+ lodash-es \"^4.17.5\"\n+ loose-envify \"^1.1.0\"\n+ prop-types \"^15.6.0\"\n+\n+react@^16.3.1:\n+ version \"16.3.1\"\n+ resolved \"https://registry.npmjs.org/react/-/react-16.3.1.tgz#4a2da433d471251c69b6033ada30e2ed1202cfd8\"\n dependencies:\n fbjs \"^0.8.16\"\n loose-envify \"^1.1.0\"\n@@ -6788,6 +6825,15 @@ reduce-function-call@^1.0.1:\n dependencies:\n balanced-match \"^0.4.2\"\n \n+redux@^3.7.2:\n+ version \"3.7.2\"\n+ resolved \"https://registry.npmjs.org/redux/-/redux-3.7.2.tgz#06b73123215901d25d065be342eb026bc1c8537b\"\n+ dependencies:\n+ lodash \"^4.2.1\"\n+ lodash-es \"^4.2.1\"\n+ loose-envify \"^1.1.0\"\n+ symbol-observable \"^1.0.3\"\n+\n regenerate@^1.2.1:\n version \"1.3.3\"\n resolved \"https://registry.npmjs.org/regenerate/-/regenerate-1.3.3.tgz#0c336d3980553d755c39b586ae3b20aa49c82b7f\"\n@@ -7811,6 +7857,10 @@ [email protected]:\n version \"1.0.1\"\n resolved \"https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4\"\n \n+symbol-observable@^1.0.3:\n+ version \"1.2.0\"\n+ resolved 
\"https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804\"\n+\n symbol-tree@^3.2.2:\n version \"3.2.2\"\n resolved \"https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6\"\n", "diff --git a/packages/nuxt3/src/builder/builder.ts b/packages/nuxt3/src/builder/builder.ts\nindex a24bd88..ecc22ef 100644\n--- a/packages/nuxt3/src/builder/builder.ts\n+++ b/packages/nuxt3/src/builder/builder.ts\n@@ -3,6 +3,7 @@ import fsExtra from 'fs-extra'\n import { debounce } from 'lodash'\n import { BundleBuilder } from 'src/webpack'\n import { Nuxt } from '../core'\n+import { DeterminedGlobals, determineGlobals } from '../utils'\n import {\n templateData,\n compileTemplates,\n@@ -15,12 +16,14 @@ import Ignore from './ignore'\n \n export class Builder {\n nuxt: Nuxt\n+ globals: DeterminedGlobals\n ignore: Ignore\n- app: NuxtApp\n templates: NuxtTemplate[]\n+ app: NuxtApp\n \n constructor (nuxt) {\n this.nuxt = nuxt\n+ this.globals = determineGlobals(nuxt.options.globalName, nuxt.options.globals)\n this.ignore = new Ignore({\n rootDir: nuxt.options.srcDir,\n ignoreArray: nuxt.options.ignore.concat(\n@@ -32,6 +35,10 @@ export class Builder {\n build () {\n return build(this)\n }\n+\n+ close () {\n+ // TODO: close watchers\n+ }\n }\n \n // Extends VueRouter\ndiff --git a/packages/nuxt3/src/builder/template.ts b/packages/nuxt3/src/builder/template.ts\nindex 63a9115..fe09f16 100644\n--- a/packages/nuxt3/src/builder/template.ts\n+++ b/packages/nuxt3/src/builder/template.ts\n@@ -11,6 +11,7 @@ export interface NuxtTemplate {\n \n export function templateData (builder) {\n return {\n+ globals: builder.globals,\n app: builder.app\n }\n }\ndiff --git a/packages/nuxt3/src/builder/watch.ts b/packages/nuxt3/src/builder/watch.ts\nindex b4d1415..d148fec 100644\n--- a/packages/nuxt3/src/builder/watch.ts\n+++ b/packages/nuxt3/src/builder/watch.ts\n@@ -38,7 +38,8 @@ export function createWatcher (\n return {\n watchAll,\n watch,\n- debug\n+ debug,\n+ close: () => watcher.close()\n }\n }\n \ndiff --git a/packages/nuxt3/src/config/options.ts b/packages/nuxt3/src/config/options.ts\nindex 5aac8ac..6e7f93c 100644\n--- a/packages/nuxt3/src/config/options.ts\n+++ b/packages/nuxt3/src/config/options.ts\n@@ -12,7 +12,7 @@ import { DefaultConfiguration, defaultNuxtConfigFile, getDefaultNuxtConfig } fro\n import { deleteProp, mergeConfigs, setProp, overrideProp, Optional } from './transformers'\n \n interface InputConfiguration {\n- appTemplatePath?: string\n+ documentPath?: string\n layoutTransition?: string | DefaultConfiguration['layoutTransition']\n loading?: true | false | DefaultConfiguration['loading']\n manifest?: {\n@@ -197,13 +197,16 @@ function normalizeConfig (_options: CliConfiguration) {\n .concat(options.extensions))\n \n // If app.html is defined, set the template path to the user template\n- if (options.appTemplatePath === undefined) {\n- options.appTemplatePath = path.resolve(options.buildDir, 'views/app.template.html')\n- if (fs.existsSync(path.join(options.srcDir, 'app.html'))) {\n- options.appTemplatePath = path.join(options.srcDir, 'app.html')\n+ if (options.documentPath === undefined) {\n+ options.documentPath = path.resolve(options.buildDir, 'views/document.template.html')\n+ const userDocumentPath = path.join(options.srcDir, 'document.html')\n+ if (fs.existsSync(userDocumentPath)) {\n+ options.documentPath = userDocumentPath\n+ } else {\n+ options.watch.push(userDocumentPath)\n }\n } else {\n- 
options.appTemplatePath = path.resolve(options.srcDir, options.appTemplatePath)\n+ options.documentPath = path.resolve(options.srcDir, options.documentPath)\n }\n \n overrideProp(options.build, 'publicPath', options.build.publicPath.replace(/([^/])$/, '$1/'))\ndiff --git a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\nindex 3e3ce2d..482bd6b 100644\n--- a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n+++ b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n@@ -96,6 +96,9 @@ export default class SSRRenderer extends BaseRenderer {\n // Call Vue renderer renderToString\n let APP = await this.vueRenderer.renderToString(renderContext)\n \n+ // Wrap with Nuxt id\n+ APP = `<div id=\"${this.serverContext.globals.id}\">${APP}</div>`\n+\n // Call render:done in app\n await renderContext.nuxt.hooks.callHook('vue-renderer:done')\n \ndiff --git a/packages/nuxt3/src/webpack/configs/client.ts b/packages/nuxt3/src/webpack/configs/client.ts\nindex a257948..4fb35e0 100644\n--- a/packages/nuxt3/src/webpack/configs/client.ts\n+++ b/packages/nuxt3/src/webpack/configs/client.ts\n@@ -94,7 +94,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.ssr.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: false // Resources will be injected using bundleRenderer\n })\n@@ -104,7 +104,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.spa.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: true\n })\n", "diff --git a/.travis.yml b/.travis.yml\nindex d56185e..96510cb 100644\n--- a/.travis.yml\n+++ b/.travis.yml\n@@ -2,5 +2,6 @@ language: node_js\n node_js:\n - 'stable'\n script:\n+ - yarn lint\n - yarn build\n - yarn test\n", "diff --git a/core/main/README.md b/core/main/README.md\nindex e5e4c93..e9cfda9 100644\n--- a/core/main/README.md\n+++ b/core/main/README.md\n@@ -217,7 +217,7 @@ You can find the instructions [here](https://github.com/matteobruni/tsparticles/\n \n You can find the instructions [here](https://github.com/matteobruni/tsparticles/blob/master/components/svelte/README.md)\n \n-### VueJS\n+### VueJS 2.x\n \n #### `particles.vue`\n \n@@ -225,6 +225,14 @@ You can find the instructions [here](https://github.com/matteobruni/tsparticles/\n \n You can find the instructions [here](https://github.com/matteobruni/tsparticles/blob/master/components/vue/README.md)\n \n+### VueJS 3.x\n+\n+#### `particles.vue3`\n+\n+[![npm](https://img.shields.io/npm/v/particles.vue3)](https://www.npmjs.com/package/particles.vue3) [![npm](https://img.shields.io/npm/dm/particles.vue3)](https://www.npmjs.com/package/particles.vue3)\n+\n+You can find the instructions [here](https://github.com/matteobruni/tsparticles/blob/master/components/vue3/README.md)\n+\n ---\n \n ## **_Demo / Generator_**\ndiff --git a/core/main/tsconfig.json b/core/main/tsconfig.json\nindex 7916bc5..72399c0 100644\n--- a/core/main/tsconfig.json\n+++ b/core/main/tsconfig.json\n@@ -107,10 +107,14 @@\n \"source\": \"../../components/react/README.md\"\n },\n {\n- \"title\": \"Vue\",\n+ \"title\": \"Vue 2.x\",\n \"source\": \"../../components/vue/README.md\"\n },\n {\n+ \"title\": \"Vue 3.x\",\n+ \"source\": \"../../components/vue3/README.md\"\n+ },\n+ {\n \"title\": \"Svelte\",\n \"source\": 
\"../../components/svelte/README.md\"\n },\n", "diff --git a/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue b/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\nindex 5138589..f756981 100644\n--- a/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\n@@ -2,40 +2,46 @@\n <div\n class=\"backgroundColor pa-2 menu-filter-dropdown\"\n :class=\"{ nested }\"\n- :style=\"{ width: nested ? '100%' : '530px' }\"\n+ :style=\"{ width: nested ? '100%' : '630px' }\"\n >\n <div class=\"grid\" @click.stop>\n <template v-for=\"(filter, i) in filters\" dense>\n <template v-if=\"filter.status !== 'delete'\">\n- <div v-if=\"filter.is_group\" :key=\"i\" style=\"grid-column: span 5; padding: 6px\" class=\"elevation-4\">\n- <div class=\"d-flex\" style=\"gap: 6px; padding: 0 6px\">\n- <v-icon\n- v-if=\"!filter.readOnly\"\n- small\n- class=\"nc-filter-item-remove-btn\"\n- @click.stop=\"deleteFilter(filter, i)\"\n- >\n- mdi-close-box\n- </v-icon>\n- <span v-if=\"!i\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n- <v-select\n- v-else\n- v-model=\"filter.logical_op\"\n- class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n- :items=\"['and', 'or']\"\n- solo\n- flat\n- dense\n- hide-details\n- placeholder=\"Group op\"\n- @click.stop\n- @change=\"saveOrUpdate(filter, i)\"\n- >\n- <template #item=\"{ item }\">\n- <span class=\"caption font-weight-regular\">{{ item }}</span>\n- </template>\n- </v-select>\n- </div>\n+ <template v-if=\"filter.is_group\">\n+ <v-icon\n+ v-if=\"!filter.readOnly\"\n+ small\n+ class=\"nc-filter-item-remove-btn\"\n+ @click.stop=\"deleteFilter(filter, i)\"\n+ :key=\"i + '_1'\"\n+ >\n+ mdi-close-box\n+ </v-icon>\n+ <span v-else :key=\"i + '_1'\" />\n+\n+ <span :key=\"i + '_2'\" v-if=\"!i\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n+ <v-select\n+ v-else\n+ :key=\"i + '_2'\"\n+ v-model=\"filter.logical_op\"\n+ class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n+ :items=\"['and', 'or']\"\n+ solo\n+ flat\n+ dense\n+ hide-details\n+ placeholder=\"Group op\"\n+ @click.stop\n+ @change=\"saveOrUpdate(filter, i)\"\n+ >\n+ <template #item=\"{ item }\">\n+ <span class=\"caption font-weight-regular\">{{ item }}</span>\n+ </template>\n+ </v-select>\n+ <span :key=\"i + '_3'\" style=\"grid-column: span 3\"></span>\n+ </template>\n+\n+ <div v-if=\"filter.is_group\" :key=\"i + '_4'\" style=\"grid-column: span 5; padding: 6px\" class=\"elevation-4\">\n <column-filter\n v-if=\"filter.id || shared\"\n ref=\"nestedFilter\"\n@@ -54,19 +60,19 @@\n <template v-else>\n <v-icon\n v-if=\"!filter.readOnly\"\n- :key=\"i + '_1'\"\n+ :key=\"i + '_5'\"\n small\n class=\"nc-filter-item-remove-btn\"\n @click.stop=\"deleteFilter(filter, i)\"\n >\n mdi-close-box\n </v-icon>\n- <span v-else :key=\"i + '_1'\" />\n- <span v-if=\"!i\" :key=\"i + '_2'\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n+ <span v-else :key=\"i + '_5'\" />\n+ <span v-if=\"!i\" :key=\"i + '_6'\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n \n <v-select\n v-else\n- :key=\"i + '_2'\"\n+ :key=\"i + '_6'\"\n v-model=\"filter.logical_op\"\n class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n :items=\"['and', 'or']\"\n@@ -84,7 +90,7 @@\n </v-select>\n \n <field-list-auto-complete-dropdown\n- :key=\"i + '_3'\"\n+ :key=\"i + '_7'\"\n v-model=\"filter.fk_column_id\"\n class=\"caption 
nc-filter-field-select\"\n :columns=\"columns\"\n@@ -94,7 +100,7 @@\n />\n \n <v-select\n- :key=\"i + '_4'\"\n+ :key=\"i + '_8'\"\n v-model=\"filter.comparison_op\"\n class=\"flex-shrink-1 flex-grow-0 caption nc-filter-operation-select\"\n :items=\"filterComparisonOp(filter)\"\n@@ -114,11 +120,11 @@\n <span class=\"caption font-weight-regular\">{{ item.text }}</span>\n </template>\n </v-select>\n- <span v-else :key=\"i + '_4'\"></span>\n+ <span v-else :key=\"i + '_8'\"></span>\n <span v-if=\"['null', 'notnull', 'empty', 'notempty'].includes(filter.comparison_op)\" :key=\"i + '_5'\" />\n <v-checkbox\n v-else-if=\"types[filter.field] === 'boolean'\"\n- :key=\"i + '_5'\"\n+ :key=\"i + '_9'\"\n v-model=\"filter.value\"\n dense\n :disabled=\"filter.readOnly\"\n@@ -126,7 +132,7 @@\n />\n <v-text-field\n v-else-if=\"filter && filter.fk_column_id\"\n- :key=\"i + '_5'\"\n+ :key=\"i + '_9'\"\n v-model=\"filter.value\"\n solo\n flat\n@@ -137,7 +143,7 @@\n @click.stop\n @input=\"saveOrUpdate(filter, i)\"\n />\n- <span v-else :key=\"i + '_5'\"></span>\n+ <span v-else :key=\"i + '_9'\"></span>\n </template>\n </template>\n </template>\n@@ -411,6 +417,7 @@ export default {\n parentId: this.parentId,\n is_group: true,\n status: 'update',\n+ logical_op: 'and',\n });\n this.filters = this.filters.slice();\n const index = this.filters.length - 1;\n@@ -478,4 +485,8 @@ export default {\n column-gap: 6px;\n row-gap: 6px;\n }\n+\n+.nc-filter-value-select {\n+ min-width: 100px;\n+}\n </style>\n"]
5
["7e04a5e829d7416e312ac342a00a11787745753b", "09476134eeeb12c025618919ab9a795a680a9b30", "2ac99c0a66a1adc18ee4ef660608f814823dd198", "e4c3e2cff769ce46d22d5c8f7dd527510443a8a7", "4f86f2570b274c45605cc59d9adb38f7ed30cd17"]
["build", "feat", "ci", "docs", "refactor"]
update pr condition,add clean up test Add another clean up test, which verifies that the state is cleaned up after the timer (non-recurring) is triggered.,Introduce timediff fn (stub),fix `memtable` docstrings,fix build
["diff --git a/.github/workflows/release-pr.yml b/.github/workflows/release-pr.yml\nindex 697ca8e..23f4475 100644\n--- a/.github/workflows/release-pr.yml\n+++ b/.github/workflows/release-pr.yml\n@@ -3,7 +3,6 @@ name: release\n on:\n issue_comment:\n types: [created]\n- contains: \"/trigger release\"\n \n env:\n # 7 GiB by default on GitHub, setting to 6 GiB\n@@ -11,6 +10,7 @@ env:\n \n jobs:\n release-pr:\n+ if: ${{ github.event.issue.pull_request && github.event.comment.body == '/trigger release' }}\n permissions:\n id-token: write\n runs-on: ubuntu-latest\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\nindex d36b4c9..ca5047f 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n@@ -630,6 +630,40 @@ public final class ProcessExecutionCleanStateTest {\n }\n \n @Test\n+ public void testProcessWithTriggerTimerStartEvent() {\n+ // given\n+ final var deployment =\n+ engineRule\n+ .deployment()\n+ .withXmlResource(\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .timerWithDate(\"=now() + duration(\\\"PT15S\\\")\")\n+ .endEvent()\n+ .done())\n+ .deploy();\n+\n+ final var processDefinitionKey =\n+ deployment.getValue().getProcessesMetadata().get(0).getProcessDefinitionKey();\n+\n+ // when\n+ engineRule.awaitProcessingOf(\n+ RecordingExporter.timerRecords(TimerIntent.CREATED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .getFirst());\n+\n+ engineRule.increaseTime(Duration.ofSeconds(15));\n+\n+ RecordingExporter.processInstanceRecords(ProcessInstanceIntent.ELEMENT_COMPLETED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .withElementType(BpmnElementType.PROCESS)\n+ .await();\n+\n+ // then\n+ assertThatStateIsEmpty();\n+ }\n+\n+ @Test\n public void testProcessWithTimerStartEventRedeployment() {\n // given\n final var deployment =\n", "diff --git a/rust/Cargo.lock b/rust/Cargo.lock\nindex b42616f..4795eb6 100644\n--- a/rust/Cargo.lock\n+++ b/rust/Cargo.lock\n@@ -1287,7 +1287,7 @@ dependencies = [\n [[package]]\n name = \"datafusion\"\n version = \"5.1.0\"\n-source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=8df4132b83d896a0d3db5c82a4eaaa3eaa285d15#8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\"\n+source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=868f3c4de13d13cda84cee33475b9782b94fa60c#868f3c4de13d13cda84cee33475b9782b94fa60c\"\n dependencies = [\n \"ahash 0.7.4\",\n \"arrow 6.0.0\",\ndiff --git a/rust/cubesql/Cargo.toml b/rust/cubesql/Cargo.toml\nindex 3cb386a..9aef494 100644\n--- a/rust/cubesql/Cargo.toml\n+++ b/rust/cubesql/Cargo.toml\n@@ -9,7 +9,7 @@ documentation = \"https://cube.dev/docs\"\n homepage = \"https://cube.dev\"\n \n [dependencies]\n-datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\", default-features = false, features = [\"unicode_expressions\"] }\n+datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"868f3c4de13d13cda84cee33475b9782b94fa60c\", default-features = false, features = [\"unicode_expressions\"] }\n anyhow = \"1.0\"\n thiserror = \"1.0\"\n cubeclient = { path = \"../cubeclient\" }\ndiff --git a/rust/cubesql/src/compile/engine/df/intervals.rs b/rust/cubesql/src/compile/engine/df/intervals.rs\nnew file mode 100644\nindex 
0000000..9e6cb7e\n--- /dev/null\n+++ b/rust/cubesql/src/compile/engine/df/intervals.rs\n@@ -0,0 +1,51 @@\n+#[macro_export]\n+macro_rules! make_string_interval_year_month {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let interval = $array.value($row) as f64;\n+ let years = (interval / 12_f64).floor();\n+ let month = interval - (years * 12_f64);\n+\n+ format!(\n+ \"{} years {} mons 0 days 0 hours 0 mins 0.00 secs\",\n+ years, month,\n+ )\n+ };\n+\n+ s\n+ }};\n+}\n+\n+#[macro_export]\n+macro_rules! make_string_interval_day_time {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let value: u64 = $array.value($row) as u64;\n+\n+ let days_parts: i32 = ((value & 0xFFFFFFFF00000000) >> 32) as i32;\n+ let milliseconds_part: i32 = (value & 0xFFFFFFFF) as i32;\n+\n+ let secs = milliseconds_part / 1000;\n+ let mins = secs / 60;\n+ let hours = mins / 60;\n+\n+ let secs = secs - (mins * 60);\n+ let mins = mins - (hours * 60);\n+\n+ format!(\n+ \"0 years 0 mons {} days {} hours {} mins {}.{:02} secs\",\n+ days_parts,\n+ hours,\n+ mins,\n+ secs,\n+ (milliseconds_part % 1000),\n+ )\n+ };\n+\n+ s\n+ }};\n+}\ndiff --git a/rust/cubesql/src/compile/engine/df/mod.rs b/rust/cubesql/src/compile/engine/df/mod.rs\nindex a19a970..3097523 100644\n--- a/rust/cubesql/src/compile/engine/df/mod.rs\n+++ b/rust/cubesql/src/compile/engine/df/mod.rs\n@@ -1 +1,2 @@\n pub mod coerce;\n+pub mod intervals;\ndiff --git a/rust/cubesql/src/compile/engine/udf.rs b/rust/cubesql/src/compile/engine/udf.rs\nindex 55b8bc1..0e160b3 100644\n--- a/rust/cubesql/src/compile/engine/udf.rs\n+++ b/rust/cubesql/src/compile/engine/udf.rs\n@@ -1,14 +1,19 @@\n use std::any::type_name;\n use std::sync::Arc;\n \n+\n use datafusion::{\n arrow::{\n array::{\n ArrayRef, BooleanArray, BooleanBuilder, GenericStringArray, Int32Builder,\n- PrimitiveArray, StringBuilder, UInt32Builder,\n+ IntervalDayTimeBuilder, PrimitiveArray, StringBuilder,\n+ UInt32Builder,\n },\n compute::cast,\n- datatypes::{DataType, Int64Type},\n+ datatypes::{\n+ DataType, Int64Type, IntervalUnit, TimeUnit,\n+ TimestampNanosecondType,\n+ },\n },\n error::DataFusionError,\n logical_plan::create_udf,\n@@ -399,3 +404,63 @@ pub fn create_convert_tz_udf() -> ScalarUDF {\n &fun,\n )\n }\n+\n+pub fn create_timediff_udf() -> ScalarUDF {\n+ let fun = make_scalar_function(move |args: &[ArrayRef]| {\n+ assert!(args.len() == 2);\n+\n+ let left_dt = &args[0];\n+ let right_dt = &args[1];\n+\n+ let left_date = match left_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(left_dt, \"left_dt\", TimestampNanosecondType);\n+ let ts = arr.value(0);\n+\n+ // NaiveDateTime::from_timestamp(ts, 0)\n+ ts\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"left_dt argument must be a Timestamp, actual: {}\",\n+ left_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let right_date = match right_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(right_dt, \"right_dt\", TimestampNanosecondType);\n+ arr.value(0)\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"right_dt argument must be a Timestamp, actual: {}\",\n+ right_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let diff = right_date - left_date;\n+ if diff != 0 {\n+ return Err(DataFusionError::NotImplemented(format!(\n+ \"timediff is not implemented, it's stub\"\n+ )));\n+ }\n+\n+ let mut 
interal_arr = IntervalDayTimeBuilder::new(1);\n+ interal_arr.append_value(diff)?;\n+\n+ Ok(Arc::new(interal_arr.finish()) as ArrayRef)\n+ });\n+\n+ let return_type: ReturnTypeFunction =\n+ Arc::new(move |_| Ok(Arc::new(DataType::Interval(IntervalUnit::DayTime))));\n+\n+ ScalarUDF::new(\n+ \"timediff\",\n+ &Signature::any(2, Volatility::Immutable),\n+ &return_type,\n+ &fun,\n+ )\n+}\ndiff --git a/rust/cubesql/src/compile/mod.rs b/rust/cubesql/src/compile/mod.rs\nindex a88da57..6121aa0 100644\n--- a/rust/cubesql/src/compile/mod.rs\n+++ b/rust/cubesql/src/compile/mod.rs\n@@ -32,8 +32,8 @@ use self::engine::context::SystemVar;\n use self::engine::provider::CubeContext;\n use self::engine::udf::{\n create_connection_id_udf, create_convert_tz_udf, create_current_user_udf, create_db_udf,\n- create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_user_udf,\n- create_version_udf,\n+ create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_timediff_udf,\n+ create_user_udf, create_version_udf,\n };\n use self::parser::parse_sql_to_statement;\n \n@@ -1450,6 +1450,7 @@ impl QueryPlanner {\n ctx.register_udf(create_if_udf());\n ctx.register_udf(create_least_udf());\n ctx.register_udf(create_convert_tz_udf());\n+ ctx.register_udf(create_timediff_udf());\n \n let state = ctx.state.lock().unwrap().clone();\n let cube_ctx = CubeContext::new(&state, &self.context.cubes);\n@@ -3226,6 +3227,25 @@ mod tests {\n }\n \n #[tokio::test]\n+ async fn test_timediff() -> Result<(), CubeError> {\n+ assert_eq!(\n+ execute_df_query(\n+ \"select \\\n+ timediff('1994-11-26T13:25:00.000Z'::timestamp, '1994-11-26T13:25:00.000Z'::timestamp) as r1\n+ \".to_string()\n+ )\n+ .await?,\n+ \"+------------------------------------------------+\\n\\\n+ | r1 |\\n\\\n+ +------------------------------------------------+\\n\\\n+ | 0 years 0 mons 0 days 0 hours 0 mins 0.00 secs |\\n\\\n+ +------------------------------------------------+\"\n+ );\n+\n+ Ok(())\n+ }\n+\n+ #[tokio::test]\n async fn test_metabase() -> Result<(), CubeError> {\n assert_eq!(\n execute_df_query(\ndiff --git a/rust/cubesql/src/mysql/dataframe.rs b/rust/cubesql/src/mysql/dataframe.rs\nindex fa246aa..2443458 100644\n--- a/rust/cubesql/src/mysql/dataframe.rs\n+++ b/rust/cubesql/src/mysql/dataframe.rs\n@@ -3,9 +3,10 @@ use std::fmt::{self, Debug, Formatter};\n use chrono::{SecondsFormat, TimeZone, Utc};\n use comfy_table::{Cell, Table};\n use datafusion::arrow::array::{\n- Array, Float64Array, Int32Array, Int64Array, StringArray, TimestampMicrosecondArray,\n- UInt32Array,\n+ Array, Float64Array, Int32Array, Int64Array, IntervalDayTimeArray, IntervalYearMonthArray,\n+ StringArray, TimestampMicrosecondArray, UInt32Array,\n };\n+use datafusion::arrow::datatypes::IntervalUnit;\n use datafusion::arrow::{\n array::{BooleanArray, TimestampNanosecondArray, UInt64Array},\n datatypes::{DataType, TimeUnit},\n@@ -15,6 +16,7 @@ use log::{error, warn};\n use msql_srv::{ColumnFlags, ColumnType};\n \n use crate::{compile::builder::CompiledQueryFieldMeta, CubeError};\n+use crate::{make_string_interval_day_time, make_string_interval_year_month};\n \n #[derive(Clone, Debug)]\n pub struct Column {\n@@ -309,6 +311,7 @@ pub fn arrow_to_column_type(arrow_type: DataType) -> Result<ColumnType, CubeErro\n DataType::Binary => Ok(ColumnType::MYSQL_TYPE_BLOB),\n DataType::Utf8 | DataType::LargeUtf8 => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Timestamp(_, _) => Ok(ColumnType::MYSQL_TYPE_STRING),\n+ DataType::Interval(_) => 
Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Float16 | DataType::Float64 => Ok(ColumnType::MYSQL_TYPE_DOUBLE),\n DataType::Boolean => Ok(ColumnType::MYSQL_TYPE_TINY),\n DataType::Int8\n@@ -402,6 +405,24 @@ pub fn batch_to_dataframe(batches: &Vec<RecordBatch>) -> Result<DataFrame, CubeE\n });\n }\n }\n+ DataType::Interval(IntervalUnit::DayTime) => {\n+ let a = array\n+ .as_any()\n+ .downcast_ref::<IntervalDayTimeArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_day_time!(a, i)));\n+ }\n+ }\n+ DataType::Interval(IntervalUnit::YearMonth) => {\n+ let a = array\n+ .as_any()\n+ .downcast_ref::<IntervalYearMonthArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_year_month!(a, i)));\n+ }\n+ }\n DataType::Boolean => {\n let a = array.as_any().downcast_ref::<BooleanArray>().unwrap();\n for i in 0..num_rows {\n", "diff --git a/ibis/expr/api.py b/ibis/expr/api.py\nindex 93fabaa..66a2ea9 100644\n--- a/ibis/expr/api.py\n+++ b/ibis/expr/api.py\n@@ -403,15 +403,21 @@ def memtable(\n >>> import ibis\n >>> t = ibis.memtable([{\"a\": 1}, {\"a\": 2}])\n >>> t\n+ PandasInMemoryTable\n+ data:\n+ DataFrameProxy:\n+ a\n+ 0 1\n+ 1 2\n \n >>> t = ibis.memtable([{\"a\": 1, \"b\": \"foo\"}, {\"a\": 2, \"b\": \"baz\"}])\n >>> t\n PandasInMemoryTable\n data:\n- ((1, 'foo'), (2, 'baz'))\n- schema:\n- a int8\n- b string\n+ DataFrameProxy:\n+ a b\n+ 0 1 foo\n+ 1 2 baz\n \n Create a table literal without column names embedded in the data and pass\n `columns`\n@@ -420,10 +426,22 @@ def memtable(\n >>> t\n PandasInMemoryTable\n data:\n- ((1, 'foo'), (2, 'baz'))\n- schema:\n- a int8\n- b string\n+ DataFrameProxy:\n+ a b\n+ 0 1 foo\n+ 1 2 baz\n+\n+ Create a table literal without column names embedded in the data. 
Ibis\n+ generates column names if none are provided.\n+\n+ >>> t = ibis.memtable([(1, \"foo\"), (2, \"baz\")])\n+ >>> t\n+ PandasInMemoryTable\n+ data:\n+ DataFrameProxy:\n+ col0 col1\n+ 0 1 foo\n+ 1 2 baz\n \"\"\"\n if columns is not None and schema is not None:\n raise NotImplementedError(\n", "diff --git a/server/Dockerfile b/server/Dockerfile\nindex 2f203bb..a84c31e 100755\n--- a/server/Dockerfile\n+++ b/server/Dockerfile\n@@ -9,9 +9,11 @@ ENV TZ utc\n WORKDIR /src\n \n COPY package.json /src\n+COPY package-lock.json /src\n+COPY tsconfig.json /src\n RUN npm install --production --no-optional\n \n COPY public /src/public\n COPY dist /src\n \n-CMD [ \"node\", \"./server/index.js\" ]\n+CMD [ \"node\", \"-r\", \"tsconfig-paths/register\", \"./server/index.js\" ]\ndiff --git a/server/package-lock.json b/server/package-lock.json\nindex 6cacfa2..236f1bb 100644\n--- a/server/package-lock.json\n+++ b/server/package-lock.json\n@@ -2164,8 +2164,7 @@\n \"@types/json5\": {\n \"version\": \"0.0.29\",\n \"resolved\": \"https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz\",\n- \"integrity\": \"sha1-7ihweulOEdK4J7y+UnC86n8+ce4=\",\n- \"dev\": true\n+ \"integrity\": \"sha1-7ihweulOEdK4J7y+UnC86n8+ce4=\"\n },\n \"@types/jsonwebtoken\": {\n \"version\": \"8.3.5\",\n@@ -9246,8 +9245,7 @@\n \"strip-bom\": {\n \"version\": \"3.0.0\",\n \"resolved\": \"https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz\",\n- \"integrity\": \"sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=\",\n- \"dev\": true\n+ \"integrity\": \"sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=\"\n },\n \"strip-final-newline\": {\n \"version\": \"2.0.0\",\n@@ -9524,7 +9522,6 @@\n \"version\": \"3.9.0\",\n \"resolved\": \"https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz\",\n \"integrity\": \"sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==\",\n- \"dev\": true,\n \"requires\": {\n \"@types/json5\": \"^0.0.29\",\n \"json5\": \"^1.0.1\",\n@@ -9536,7 +9533,6 @@\n \"version\": \"1.0.1\",\n \"resolved\": \"https://registry.npmjs.org/json5/-/json5-1.0.1.tgz\",\n \"integrity\": \"sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==\",\n- \"dev\": true,\n \"requires\": {\n \"minimist\": \"^1.2.0\"\n }\n@@ -9544,8 +9540,7 @@\n \"minimist\": {\n \"version\": \"1.2.5\",\n \"resolved\": \"https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz\",\n- \"integrity\": \"sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==\",\n- \"dev\": true\n+ \"integrity\": \"sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==\"\n }\n }\n },\ndiff --git a/server/package.json b/server/package.json\nindex 35426e9..896e9b3 100644\n--- a/server/package.json\n+++ b/server/package.json\n@@ -41,6 +41,7 @@\n \"pino-cloudwatch\": \"0.7.0\",\n \"pino-multi-stream\": \"4.2.0\",\n \"reflect-metadata\": \"0.1.13\",\n+ \"tsconfig-paths\": \"3.9.0\",\n \"typeorm\": \"0.2.37\"\n },\n \"devDependencies\": {\n@@ -69,7 +70,6 @@\n \"pino-pretty\": \"3.6.1\",\n \"ts-jest\": \"27.0.7\",\n \"ts-node-dev\": \"1.1.8\",\n- \"tsconfig-paths\": \"3.9.0\",\n \"typescript\": \"4.3.5\"\n },\n \"jest-junit\": {\n"]
5
["f8c7b34bdeedcf1a4628cd50b23920afeaf57cb6", "aa746b764e6c54bbbd631210fce35df842d09b12", "29dfb9716298c5a579c0ffba6742e13a29325670", "72bc0f5172c0a3d17bde29cfc00db4c60d2fee3a", "a827777f41e90b6332c191d05bae8db525de6f38"]
["ci", "test", "feat", "docs", "build"]
support multi deploy in ReplayStateTest In order to support cases for call-activity, the ReplayStateTest needs to be able to deploy multiple processes.,make jq use compact json for rebase branch query,auto focus inputs in survey form,update deps,Improved Config Loading #423
["diff --git a/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java b/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java\nindex 77e320f..0389291 100644\n--- a/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java\n@@ -22,9 +22,9 @@ import io.zeebe.protocol.record.intent.ProcessInstanceIntent;\n import io.zeebe.protocol.record.value.BpmnElementType;\n import io.zeebe.test.util.record.RecordingExporter;\n import java.time.Duration;\n+import java.util.ArrayList;\n import java.util.Collection;\n import java.util.List;\n-import java.util.Optional;\n import java.util.function.Function;\n import org.assertj.core.api.SoftAssertions;\n import org.awaitility.Awaitility;\n@@ -170,7 +170,7 @@ public final class ReplayStateTest {\n @Test\n public void shouldRestoreState() {\n // given\n- testCase.process.ifPresent(process -> engine.deployment().withXmlResource(process).deploy());\n+ testCase.processes.forEach(process -> engine.deployment().withXmlResource(process).deploy());\n \n final Record<?> finalRecord = testCase.execution.apply(engine);\n \n@@ -227,7 +227,7 @@ public final class ReplayStateTest {\n \n private static final class TestCase {\n private final String description;\n- private Optional<BpmnModelInstance> process = Optional.empty();\n+ private final List<BpmnModelInstance> processes = new ArrayList<>();\n private Function<EngineRule, Record<?>> execution =\n engine -> RecordingExporter.records().getFirst();\n \n@@ -236,7 +236,7 @@ public final class ReplayStateTest {\n }\n \n private TestCase withProcess(final BpmnModelInstance process) {\n- this.process = Optional.of(process);\n+ processes.add(process);\n return this;\n }\n \n", "diff --git a/.github/workflows/ibis-rebase-nightly.yml b/.github/workflows/ibis-rebase-nightly.yml\nindex 0e284b0..4a3ec7a 100644\n--- a/.github/workflows/ibis-rebase-nightly.yml\n+++ b/.github/workflows/ibis-rebase-nightly.yml\n@@ -22,7 +22,7 @@ jobs:\n | cut -d ' ' -f2 \\\n | grep -P '\\d+\\.x\\.x' \\\n | xargs printf '\"%s\"' \\\n- | jq -s '{branch: .}')\n+ | jq -rcMs '{branch: .}')\n \n echo \"::set-output name=matrix::$branches\"\n \n", "diff --git a/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue b/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\nindex b2a90d8..dbad824 100644\n--- a/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\n+++ b/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\n@@ -6,6 +6,7 @@ import {\n DropZoneRef,\n computed,\n onKeyStroke,\n+ onMounted,\n provide,\n ref,\n useEventListener,\n@@ -85,6 +86,8 @@ function transition(direction: TransitionDirection) {\n \n setTimeout(() => {\n isTransitioning.value = false\n+\n+ setTimeout(focusInput, 100)\n }, 1000)\n }\n \n@@ -113,6 +116,19 @@ async function goPrevious() {\n goToPrevious()\n }\n \n+function focusInput() {\n+ if (document && typeof document !== 'undefined') {\n+ const inputEl =\n+ (document.querySelector('.nc-cell input') as HTMLInputElement) ||\n+ (document.querySelector('.nc-cell textarea') as HTMLTextAreaElement)\n+\n+ if (inputEl) {\n+ inputEl.select()\n+ inputEl.focus()\n+ }\n+ }\n+}\n+\n useEventListener('wheel', (event) => {\n if (Math.abs(event.deltaX) < Math.abs(event.deltaY)) {\n // Scrolling more vertically than horizontally\n@@ -130,6 +146,8 @@ useEventListener('wheel', (event) => {\n \n onKeyStroke(['ArrowLeft', 
'ArrowDown'], goPrevious)\n onKeyStroke(['ArrowRight', 'ArrowUp', 'Enter', 'Space'], goNext)\n+\n+onMounted(focusInput)\n </script>\n \n <template>\n", "diff --git a/package.json b/package.json\nindex 3696132..ba002d2 100755\n--- a/package.json\n+++ b/package.json\n@@ -34,7 +34,6 @@\n \"koa-router\": \"~7.4.0\",\n \"koa-session\": \"~5.10.1\",\n \"koa-static\": \"~5.0.0\",\n- \"koa2-ratelimit\": \"~0.8.0\",\n \"koa2-swagger-ui\": \"~2.13.2\",\n \"node-fetch\": \"^2.5.0\",\n \"passport-github\": \"~1.1.0\",\ndiff --git a/src/index.ts b/src/index.ts\nindex 847e8aa..8c9baff 100755\n--- a/src/index.ts\n+++ b/src/index.ts\n@@ -4,7 +4,6 @@ import * as Koa from 'koa';\n import * as bodyParser from 'koa-bodyparser';\n import * as session from 'koa-session';\n import * as serve from 'koa-static';\n-// import { RateLimit } from 'koa2-ratelimit';\n import { Server } from 'net';\n \n import { setupPassport } from './auth';\ndiff --git a/src/typings/koa-router.d.ts b/src/typings/koa-router.d.ts\ndeleted file mode 100644\nindex f891ed8..0000000\n--- a/src/typings/koa-router.d.ts\n+++ /dev/null\n@@ -1,16 +0,0 @@\n-import * as koa from 'koa-router';\n-import { IUserSession } from '../models';\n-import { ILogger } from '../logger';\n-\n-declare module 'koa-router' {\n- export interface IRouterContext {\n- state: { user: IUserSession | undefined };\n- logger: ILogger;\n- }\n-}\n-\n-declare module 'koa' {\n- export interface Context {\n- logger: ILogger;\n- }\n-}\ndiff --git a/src/typings/koa2-ratelimit/index.d.ts b/src/typings/koa2-ratelimit/index.d.ts\ndeleted file mode 100644\nindex cc73228..0000000\n--- a/src/typings/koa2-ratelimit/index.d.ts\n+++ /dev/null\n@@ -1,13 +0,0 @@\n-declare module 'koa2-ratelimit' {\n- import * as Koa from 'koa';\n- interface Props {\n- interval: { min?: number; hour?: number }; // 15 minutes = 15*60*1000\n- max: number;\n- message?: string;\n- prefixKey?: string;\n- getUserId?: (ctx: Koa.Context) => Promise<string>;\n- }\n- export namespace RateLimit {\n- function middleware(options: Props): Koa.Middleware;\n- }\n-}\ndiff --git a/src/typings/mockingoose/index.d.ts b/src/typings/mockingoose/index.d.ts\ndeleted file mode 100644\nindex 757c4e4..0000000\n--- a/src/typings/mockingoose/index.d.ts\n+++ /dev/null\n@@ -1,5 +0,0 @@\n-declare module 'mockingoose' {\n- const mockingoose: any;\n-\n- export default mockingoose;\n-}\n", "diff --git a/CHANGELOG.md b/CHANGELOG.md\nindex 76dd749..2087803 100644\n--- a/CHANGELOG.md\n+++ b/CHANGELOG.md\n@@ -7,6 +7,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)\n ## [Unreleased]\n * Fix the tab '(Sync)' suffix in named tabs (https://github.com/zellij-org/zellij/pull/410)\n * Improve performance when multiple panes are open (https://github.com/zellij-org/zellij/pull/318)\n+* Improve error reporting and tests of configuration (https://github.com/zellij-org/zellij/pull/423)\n \n ## [0.6.0] - 2021-04-29\n * Doesn't quit anymore on single `q` press while in tab mode (https://github.com/zellij-org/zellij/pull/342)\n"]
5
["834a9e3cfd6439f295072e5808a02cf2a35ba083", "4638dcdf7011e8e42d11fde04f068f22ee20fa1d", "5373c3036866db58b322b424d3be9dedff57a376", "f46c6c6c26a14312aa05a77ff2a14aebd74e13ac", "099861ff5b0f83773ca0af4c70e6e39be3b0336c"]
["test", "ci", "feat", "build", "docs"]
parallelize pybind11 build,verify property exist in row object Signed-off-by: Pranav C <[email protected]>,dashboard removed unused code,run nix macos jobs on macos-13 to try and avoid SIP,removing automatic page push on nav
["diff --git a/poetry-overrides.nix b/poetry-overrides.nix\nindex d37c5ed..aaaaf02 100644\n--- a/poetry-overrides.nix\n+++ b/poetry-overrides.nix\n@@ -82,4 +82,11 @@ self: super:\n {\n patches = (attrs.patches or [ ]) ++ [ ./patches/watchdog-force-kqueue.patch ];\n });\n+\n+ pybind11 = super.pybind11.overridePythonAttrs (_: {\n+ postBuild = ''\n+ # build tests\n+ make -j $NIX_BUILD_CORES -l $NIX_BUILD_CORES\n+ '';\n+ });\n }\n", "diff --git a/packages/nc-gui/components/smartsheet/Grid.vue b/packages/nc-gui/components/smartsheet/Grid.vue\nindex 8ff5b1d..e83e2ab 100644\n--- a/packages/nc-gui/components/smartsheet/Grid.vue\n+++ b/packages/nc-gui/components/smartsheet/Grid.vue\n@@ -534,7 +534,7 @@ const saveOrUpdateRecords = async (args: { metaValue?: TableType; viewMetaValue?\n currentRow.rowMeta.changed = false\n for (const field of (args.metaValue || meta.value)?.columns ?? []) {\n if (isVirtualCol(field)) continue\n- if (currentRow.row[field.title!] !== currentRow.oldRow[field.title!]) {\n+ if (field.title! in currentRow.row && currentRow.row[field.title!] !== currentRow.oldRow[field.title!]) {\n await updateOrSaveRow(currentRow, field.title!, {}, args)\n }\n }\n", "diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py\nindex e5bc800..1afa105 100644\n--- a/api/chalicelib/core/dashboard.py\n+++ b/api/chalicelib/core/dashboard.py\n@@ -126,13 +126,6 @@ SESSIONS_META_FIELDS = {\"revId\": \"rev_id\",\n \"browser\": \"user_browser\"}\n \n \n-def __get_domains_errors_neutral(rows):\n- neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]}\n- if len(neutral.keys()) == 0:\n- neutral = {\"All\": 0}\n- return neutral\n-\n-\n def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),\n endTimestamp=TimeUTC.now(),\n density=7, **args):\n", "diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml\nnew file mode 100644\nindex 0000000..5be7d17\n--- /dev/null\n+++ b/.github/actionlint.yaml\n@@ -0,0 +1,7 @@\n+self-hosted-runner:\n+ # Labels of self-hosted runner in array of strings.\n+ labels: [macos-13]\n+# Configuration variables in array of strings defined in your repository or\n+# organization. 
`null` means disabling configuration variables check.\n+# Empty array means no configuration variable is allowed.\n+config-variables: null\ndiff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml\nindex e37346c..dce77e1 100644\n--- a/.github/workflows/nix.yml\n+++ b/.github/workflows/nix.yml\n@@ -37,7 +37,7 @@ jobs:\n - \"3.10\"\n - \"3.11\"\n include:\n- - os: macos-latest\n+ - os: macos-13\n python-version: \"3.10\"\n steps:\n - name: checkout\ndiff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\nindex 005a850..8db22e2 100644\n--- a/.pre-commit-config.yaml\n+++ b/.pre-commit-config.yaml\n@@ -3,7 +3,7 @@ ci:\n autofix_prs: false\n autoupdate_commit_msg: \"chore(deps): pre-commit.ci autoupdate\"\n skip:\n- - actionlint\n+ - actionlint-system\n - deadnix\n - just\n - nixpkgs-fmt\n@@ -17,9 +17,9 @@ default_stages:\n - commit\n repos:\n - repo: https://github.com/rhysd/actionlint\n- rev: v1.6.24\n+ rev: v1.6.25\n hooks:\n- - id: actionlint\n+ - id: actionlint-system\n - repo: https://github.com/psf/black\n rev: 23.3.0\n hooks:\n@@ -30,7 +30,7 @@ repos:\n - id: nbstripout\n exclude: .+/rendered/.+\n - repo: https://github.com/codespell-project/codespell\n- rev: v2.2.4\n+ rev: v2.2.5\n hooks:\n - id: codespell\n additional_dependencies:\n", "diff --git a/ionic/components/nav/test/basic/index.ts b/ionic/components/nav/test/basic/index.ts\nindex 4b1a8ea..2834f68 100644\n--- a/ionic/components/nav/test/basic/index.ts\n+++ b/ionic/components/nav/test/basic/index.ts\n@@ -63,12 +63,6 @@ class FirstPage {\n }\n }\n \n- onPageDidEnter() {\n- setTimeout(() => {\n- this.nav.push(PrimaryHeaderPage);\n- }, 1000);\n- }\n-\n setPages() {\n let items = [\n PrimaryHeaderPage\n"]
5
["9ab4c61975e073e214646443d088339cfdbaa88d", "c6403b62f8dc0e5bfe25a1c6306fb7040ca447ae", "b7b1d2b315443e1854403c8fe8f871c4632b5d31", "54cb6d4643b4a072ff997592a7fa14a69a6c068d", "cd9e6a2ab17c5961b0f977bb8a06f8545da49a97"]
["build", "fix", "refactor", "ci", "test"]
add title to badge icon,remove sync ts check,reorder startup steps,add link to roadmap,add workflow to release branches
["diff --git a/kibbeh/src/modules/room/chat/RoomChatList.tsx b/kibbeh/src/modules/room/chat/RoomChatList.tsx\nindex a7418e6..805a9a4 100644\n--- a/kibbeh/src/modules/room/chat/RoomChatList.tsx\n+++ b/kibbeh/src/modules/room/chat/RoomChatList.tsx\n@@ -16,6 +16,11 @@ interface ChatListProps {\n users: RoomUser[];\n }\n \n+interface BadgeIconData {\n+ emoji: string,\n+ title: string\n+}\n+\n export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const { setData } = useContext(UserPreviewModalContext);\n const { messages, toggleFrozen } = useRoomChatStore();\n@@ -48,11 +53,14 @@ export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const getBadgeIcon = (m: Message) => {\n const user = users.find((u) => u.id === m.userId);\n const isSpeaker = room.creatorId === user?.id || user?.roomPermissions?.isSpeaker;\n- let emoji = null;\n+ let badgeIconData: BadgeIconData | null = null;\n if (isSpeaker) {\n- emoji = \"\ud83d\udce3\";\n+ badgeIconData = {\n+ emoji: \"\ud83d\udce3\",\n+ title: \"Speaker\"\n+ };\n }\n- return emoji && <Twemoji text={emoji} style={{ marginRight: \"1ch\" }}/>;\n+ return badgeIconData && <Twemoji text={badgeIconData.emoji} title={badgeIconData.title} style={{ marginRight: \"1ch\" }}/>;\n };\n \n return (\n", "diff --git a/config/webpack.config.prod.js b/config/webpack.config.prod.js\nindex 8b23fba..58a4c17 100644\n--- a/config/webpack.config.prod.js\n+++ b/config/webpack.config.prod.js\n@@ -251,7 +251,7 @@ module.exports = {\n plugins: [\n argv.notypecheck\n ? null\n- : new ForkTsCheckerWebpackPlugin({tslint: true, async: false}),\n+ : new ForkTsCheckerWebpackPlugin({tslint: true}),\n // Makes some environment variables available in index.html.\n // The public URL is available as %PUBLIC_URL% in index.html, e.g.:\n // <link rel=\"shortcut icon\" href=\"%PUBLIC_URL%/favicon.ico\">\n", "diff --git a/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java b/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\nindex 52fa3a9..d81c27a 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\n@@ -50,21 +50,20 @@ public final class BrokerStartupProcess {\n // must be executed before any disk space usage listeners are registered\n result.add(new DiskSpaceUsageMonitorStep());\n }\n-\n result.add(new MonitoringServerStep());\n result.add(new BrokerAdminServiceStep());\n+\n result.add(new ClusterServicesCreationStep());\n+ result.add(new ClusterServicesStep());\n \n result.add(new CommandApiServiceStep());\n result.add(new SubscriptionApiStep());\n-\n- result.add(new ClusterServicesStep());\n+ result.add(new LeaderManagementRequestHandlerStep());\n \n if (config.getGateway().isEnable()) {\n result.add(new EmbeddedGatewayServiceStep());\n }\n \n- result.add(new LeaderManagementRequestHandlerStep());\n result.add(new PartitionManagerStep());\n \n return result;\n", "diff --git a/packages/plugin-core/README.md b/packages/plugin-core/README.md\nindex 3c25c9b..c7506d4 100644\n--- a/packages/plugin-core/README.md\n+++ b/packages/plugin-core/README.md\n@@ -187,6 +187,10 @@ When the workspace opens, it will show dialogue to install the recommended exten\n \n See [[FAQ]] to answers for common questions.\n \n+# Roadmap\n+\n+Check out our [public roadmap](https://github.com/orgs/dendronhq/projects/1) to see the features we're working on and to vote for what you want to see 
next. \n+\n \n # Contributing\n \n", "diff --git a/.github/workflows/release-pr.yml b/.github/workflows/release-pr.yml\nnew file mode 100644\nindex 0000000..697ca8e\n--- /dev/null\n+++ b/.github/workflows/release-pr.yml\n@@ -0,0 +1,48 @@\n+name: release\n+\n+on:\n+ issue_comment:\n+ types: [created]\n+ contains: \"/trigger release\"\n+\n+env:\n+ # 7 GiB by default on GitHub, setting to 6 GiB\n+ NODE_OPTIONS: --max-old-space-size=6144\n+\n+jobs:\n+ release-pr:\n+ permissions:\n+ id-token: write\n+ runs-on: ubuntu-latest\n+ timeout-minutes: 20\n+\n+ steps:\n+ - name: Ensure action is by maintainer\n+ uses: octokit/[email protected]\n+ id: check_role\n+ with:\n+ route: GET /repos/danielroe/roe.dev/collaborators/${{ github.event.comment.user.login }}\n+ env:\n+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n+\n+ - uses: actions/checkout@v3\n+ with:\n+ fetch-depth: 0\n+\n+ - run: corepack enable\n+ - uses: actions/setup-node@v3\n+ with:\n+ node-version: 20\n+ cache: \"pnpm\"\n+\n+ - name: Install dependencies\n+ run: pnpm install\n+\n+ - name: Build\n+ run: pnpm build\n+\n+ - name: Release Edge\n+ run: ./scripts/release-edge.sh\n+ env:\n+ NODE_AUTH_TOKEN: ${{ secrets.NODE_AUTH_TOKEN }}\n+ NPM_CONFIG_PROVENANCE: true\ndiff --git a/package.json b/package.json\nindex 1074dcd..48bb566 100644\n--- a/package.json\n+++ b/package.json\n@@ -5,7 +5,7 @@\n \"license\": \"MIT\",\n \"type\": \"module\",\n \"scripts\": {\n- \"build\": \"FORCE_COLOR=1 pnpm --filter './packages/**' prepack\",\n+ \"build\": \"pnpm --filter './packages/**' prepack\",\n \"build:stub\": \"pnpm --filter './packages/**' prepack --stub\",\n \"cleanup\": \"rimraf 'packages/**/node_modules' 'examples/**/node_modules' 'docs/node_modules' 'playground/node_modules' 'node_modules'\",\n \"dev\": \"pnpm play\",\n"]
5
["6e5098655e6d9bb13f6423abe780cdf6b50ff13a", "411be831591b2ea15ca9138eaf8db81f51b5101e", "3e0c4cbf91fe5efc9b93baba93e4df93ef4ab5cd", "94202f01e44c58bee4419044f8a18ac5f1a50dff", "bc28d536c0dd1061ac96cea0241857c1d4e4e0f2"]
["feat", "build", "refactor", "docs", "ci"]
dedup redundant imports,update version (nightly.0),Handle different events.,new ShowDebug parameter calculate each segment timing new parameter to show/hide segment debug information set-poshprompt updated with the new showDebug parameter Force disabled segment to be visible for debug purpose,add --ignore-existing to all npx commands
["diff --git a/ibis/backends/base/__init__.py b/ibis/backends/base/__init__.py\nindex effd44c..a59c0ec 100644\n--- a/ibis/backends/base/__init__.py\n+++ b/ibis/backends/base/__init__.py\n@@ -31,7 +31,7 @@ import ibis.common.exceptions as exc\n import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n \n __all__ = ('BaseBackend', 'Database', 'connect')\n \ndiff --git a/ibis/backends/base/sql/__init__.py b/ibis/backends/base/sql/__init__.py\nindex e4f2129..7bbdaf9 100644\n--- a/ibis/backends/base/sql/__init__.py\n+++ b/ibis/backends/base/sql/__init__.py\n@@ -12,7 +12,7 @@ import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import BaseBackend\n from ibis.backends.base.sql.compiler import Compiler\n \ndiff --git a/ibis/backends/base/sql/alchemy/__init__.py b/ibis/backends/base/sql/alchemy/__init__.py\nindex 71cc0e8..ab89d7d 100644\n--- a/ibis/backends/base/sql/alchemy/__init__.py\n+++ b/ibis/backends/base/sql/alchemy/__init__.py\n@@ -11,7 +11,7 @@ import ibis\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.alchemy.database import AlchemyDatabase, AlchemyTable\n from ibis.backends.base.sql.alchemy.datatypes import (\ndiff --git a/ibis/backends/base/sql/alchemy/query_builder.py b/ibis/backends/base/sql/alchemy/query_builder.py\nindex 54c74ba..0ec432f 100644\n--- a/ibis/backends/base/sql/alchemy/query_builder.py\n+++ b/ibis/backends/base/sql/alchemy/query_builder.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import functools\n \n import sqlalchemy as sa\n-import sqlalchemy.sql as sql\n+from sqlalchemy import sql\n \n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/base/sql/compiler/base.py b/ibis/backends/base/sql/compiler/base.py\nindex 84102aa..fb44667 100644\n--- a/ibis/backends/base/sql/compiler/base.py\n+++ b/ibis/backends/base/sql/compiler/base.py\n@@ -7,7 +7,7 @@ import toolz\n \n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n class DML(abc.ABC):\ndiff --git a/ibis/backends/base/sql/compiler/query_builder.py b/ibis/backends/base/sql/compiler/query_builder.py\nindex a2d5214..95f5e8d 100644\n--- a/ibis/backends/base/sql/compiler/query_builder.py\n+++ b/ibis/backends/base/sql/compiler/query_builder.py\n@@ -8,7 +8,7 @@ import toolz\n import ibis.common.exceptions as com\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.compiler.base import DML, QueryAST, SetOp\n from ibis.backends.base.sql.compiler.select_builder import SelectBuilder, _LimitSpec\n from ibis.backends.base.sql.compiler.translator import ExprTranslator, QueryContext\ndiff --git a/ibis/backends/base/sql/registry/main.py b/ibis/backends/base/sql/registry/main.py\nindex 77f70a5..586ace5 100644\n--- a/ibis/backends/base/sql/registry/main.py\n+++ b/ibis/backends/base/sql/registry/main.py\n@@ -4,7 +4,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis 
import util\n from ibis.backends.base.sql.registry import (\n aggregate,\n binary_infix,\ndiff --git a/ibis/backends/base/sql/registry/timestamp.py b/ibis/backends/base/sql/registry/timestamp.py\nindex 412eab1..3c8571f 100644\n--- a/ibis/backends/base/sql/registry/timestamp.py\n+++ b/ibis/backends/base/sql/registry/timestamp.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n def extract_field(sql_attr):\ndiff --git a/ibis/backends/clickhouse/tests/test_client.py b/ibis/backends/clickhouse/tests/test_client.py\nindex 8db6672..bb1b9ba 100644\n--- a/ibis/backends/clickhouse/tests/test_client.py\n+++ b/ibis/backends/clickhouse/tests/test_client.py\n@@ -3,9 +3,9 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.backends.clickhouse.tests.conftest import (\n CLICKHOUSE_HOST,\n CLICKHOUSE_PASS,\ndiff --git a/ibis/backends/conftest.py b/ibis/backends/conftest.py\nindex 3a974da..ba7ad75 100644\n--- a/ibis/backends/conftest.py\n+++ b/ibis/backends/conftest.py\n@@ -20,7 +20,7 @@ if TYPE_CHECKING:\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import _get_backend_names\n \n TEST_TABLES = {\ndiff --git a/ibis/backends/dask/execution/util.py b/ibis/backends/dask/execution/util.py\nindex 61bff7e..7ed0c10 100644\n--- a/ibis/backends/dask/execution/util.py\n+++ b/ibis/backends/dask/execution/util.py\n@@ -9,13 +9,13 @@ import pandas as pd\n from dask.dataframe.groupby import SeriesGroupBy\n \n import ibis.backends.pandas.execution.util as pd_util\n-import ibis.common.graph as graph\n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n import ibis.util\n from ibis.backends.dask.core import execute\n from ibis.backends.pandas.trace import TraceTwoLevelDispatcher\n+from ibis.common import graph\n from ibis.expr.scope import Scope\n \n if TYPE_CHECKING:\ndiff --git a/ibis/backends/duckdb/datatypes.py b/ibis/backends/duckdb/datatypes.py\nindex fd6b8f5..52c0719 100644\n--- a/ibis/backends/duckdb/datatypes.py\n+++ b/ibis/backends/duckdb/datatypes.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import parsy as p\n import toolz\n \n-import ibis.util as util\n+from ibis import util\n from ibis.common.parsing import (\n COMMA,\n FIELD,\ndiff --git a/ibis/backends/impala/__init__.py b/ibis/backends/impala/__init__.py\nindex 4ad2057..8299a28 100644\n--- a/ibis/backends/impala/__init__.py\n+++ b/ibis/backends/impala/__init__.py\n@@ -20,7 +20,7 @@ import ibis.config\n import ibis.expr.datatypes as dt\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.ddl import (\n CTAS,\ndiff --git a/ibis/backends/impala/client.py b/ibis/backends/impala/client.py\nindex 6655ce7..78d526f 100644\n--- a/ibis/backends/impala/client.py\n+++ b/ibis/backends/impala/client.py\n@@ -10,7 +10,7 @@ import sqlalchemy as sa\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import Database\n from ibis.backends.base.sql.compiler import DDL, DML\n 
from ibis.backends.base.sql.ddl import (\ndiff --git a/ibis/backends/impala/pandas_interop.py b/ibis/backends/impala/pandas_interop.py\nindex f410a8b..e687884 100644\n--- a/ibis/backends/impala/pandas_interop.py\n+++ b/ibis/backends/impala/pandas_interop.py\n@@ -22,7 +22,7 @@ from posixpath import join as pjoin\n import ibis.backends.pandas.client # noqa: F401\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.config import options\n \n \ndiff --git a/ibis/backends/impala/tests/conftest.py b/ibis/backends/impala/tests/conftest.py\nindex 1075ebe..a815be5 100644\n--- a/ibis/backends/impala/tests/conftest.py\n+++ b/ibis/backends/impala/tests/conftest.py\n@@ -13,8 +13,7 @@ import pytest\n \n import ibis\n import ibis.expr.types as ir\n-import ibis.util as util\n-from ibis import options\n+from ibis import options, util\n from ibis.backends.base import BaseBackend\n from ibis.backends.conftest import TEST_TABLES, _random_identifier\n from ibis.backends.impala.compiler import ImpalaCompiler, ImpalaExprTranslator\ndiff --git a/ibis/backends/impala/tests/test_client.py b/ibis/backends/impala/tests/test_client.py\nindex 0b56054..3fcca3a 100644\n--- a/ibis/backends/impala/tests/test_client.py\n+++ b/ibis/backends/impala/tests/test_client.py\n@@ -7,9 +7,9 @@ import pytz\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_ddl.py b/ibis/backends/impala/tests/test_ddl.py\nindex 870c4dc..2346a3d 100644\n--- a/ibis/backends/impala/tests/test_ddl.py\n+++ b/ibis/backends/impala/tests/test_ddl.py\n@@ -6,7 +6,7 @@ import ibis\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.ddl import fully_qualified_re\n from ibis.tests.util import assert_equal\n \ndiff --git a/ibis/backends/impala/tests/test_exprs.py b/ibis/backends/impala/tests/test_exprs.py\nindex cfc8552..1d6f44f 100644\n--- a/ibis/backends/impala/tests/test_exprs.py\n+++ b/ibis/backends/impala/tests/test_exprs.py\n@@ -5,10 +5,10 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.types as ir\n from ibis import literal as L\n from ibis.backends.impala.compiler import ImpalaCompiler\n+from ibis.expr import api\n from ibis.expr.datatypes import Category\n \n \ndiff --git a/ibis/backends/impala/tests/test_partition.py b/ibis/backends/impala/tests/test_partition.py\nindex 1f96e7d..44217a4 100644\n--- a/ibis/backends/impala/tests/test_partition.py\n+++ b/ibis/backends/impala/tests/test_partition.py\n@@ -6,7 +6,7 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_udf.py b/ibis/backends/impala/tests/test_udf.py\nindex 895918b..fd950d5 100644\n--- a/ibis/backends/impala/tests/test_udf.py\n+++ b/ibis/backends/impala/tests/test_udf.py\n@@ -9,11 +9,11 @@ import ibis\n import ibis.backends.impala as api\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n-import ibis.expr.rules as rules\n import ibis.expr.types as ir\n-import 
ibis.util as util\n+from ibis import util\n from ibis.backends.impala import ddl\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import rules\n \n pytest.importorskip(\"impala\")\n \ndiff --git a/ibis/backends/impala/udf.py b/ibis/backends/impala/udf.py\nindex c6f2ef6..8b8b552 100644\n--- a/ibis/backends/impala/udf.py\n+++ b/ibis/backends/impala/udf.py\n@@ -21,7 +21,7 @@ import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.udf.validate as v\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.registry import fixed_arity, sql_type_names\n from ibis.backends.impala.compiler import ImpalaExprTranslator\n \ndiff --git a/ibis/backends/mysql/__init__.py b/ibis/backends/mysql/__init__.py\nindex c0ddacb..50b331a 100644\n--- a/ibis/backends/mysql/__init__.py\n+++ b/ibis/backends/mysql/__init__.py\n@@ -8,7 +8,7 @@ import warnings\n from typing import Literal\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/mysql/compiler.py b/ibis/backends/mysql/compiler.py\nindex 13819cb..7456f71 100644\n--- a/ibis/backends/mysql/compiler.py\n+++ b/ibis/backends/mysql/compiler.py\n@@ -1,7 +1,7 @@\n from __future__ import annotations\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n from ibis.backends.base.sql.alchemy import AlchemyCompiler, AlchemyExprTranslator\ndiff --git a/ibis/backends/postgres/tests/test_functions.py b/ibis/backends/postgres/tests/test_functions.py\nindex 33c6d2e..0f377e3 100644\n--- a/ibis/backends/postgres/tests/test_functions.py\n+++ b/ibis/backends/postgres/tests/test_functions.py\n@@ -11,9 +11,9 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis import literal as L\n from ibis.expr.window import rows_with_max_lookback\n \ndiff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py\nindex 1b42080..b994911 100644\n--- a/ibis/backends/pyspark/__init__.py\n+++ b/ibis/backends/pyspark/__init__.py\n@@ -14,8 +14,7 @@ import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.expr.types as types\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.compiler import Compiler, TableSetFormatter\n from ibis.backends.base.sql.ddl import (\n@@ -217,16 +216,16 @@ class Backend(BaseSQLBackend):\n **kwargs: Any,\n ) -> Any:\n \"\"\"Execute an expression.\"\"\"\n- if isinstance(expr, types.Table):\n+ if isinstance(expr, ir.Table):\n return self.compile(expr, timecontext, params, **kwargs).toPandas()\n- elif isinstance(expr, types.Column):\n+ elif isinstance(expr, ir.Column):\n # expression must be named for the projection\n if not expr.has_name():\n expr = expr.name(\"tmp\")\n return self.compile(\n expr.to_projection(), timecontext, params, **kwargs\n ).toPandas()[expr.get_name()]\n- elif isinstance(expr, types.Scalar):\n+ elif isinstance(expr, ir.Scalar):\n compiled = self.compile(expr, timecontext, params, **kwargs)\n if isinstance(compiled, Column):\n # attach result column to a fake DataFrame and\ndiff --git 
a/ibis/backends/pyspark/tests/test_ddl.py b/ibis/backends/pyspark/tests/test_ddl.py\nindex 0288062..ccc8a97 100644\n--- a/ibis/backends/pyspark/tests/test_ddl.py\n+++ b/ibis/backends/pyspark/tests/test_ddl.py\n@@ -5,7 +5,7 @@ import pytest\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pyspark = pytest.importorskip(\"pyspark\")\ndiff --git a/ibis/backends/sqlite/tests/test_client.py b/ibis/backends/sqlite/tests/test_client.py\nindex 95aa24d..ad64700 100644\n--- a/ibis/backends/sqlite/tests/test_client.py\n+++ b/ibis/backends/sqlite/tests/test_client.py\n@@ -5,8 +5,8 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.types as ir\n+from ibis import config\n \n pytest.importorskip(\"sqlalchemy\")\n \ndiff --git a/ibis/expr/format.py b/ibis/expr/format.py\nindex e3d48cd..85fab3f 100644\n--- a/ibis/expr/format.py\n+++ b/ibis/expr/format.py\n@@ -9,13 +9,13 @@ from typing import Any, Callable, Deque, Iterable, Mapping, Tuple\n import rich.pretty\n \n import ibis\n-import ibis.common.graph as graph\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n import ibis.expr.window as win\n-import ibis.util as util\n+from ibis import util\n+from ibis.common import graph\n \n Aliases = Mapping[ops.TableNode, int]\n Deps = Deque[Tuple[int, ops.TableNode]]\ndiff --git a/ibis/expr/operations/relations.py b/ibis/expr/operations/relations.py\nindex 080ddcd..de44a15 100644\n--- a/ibis/expr/operations/relations.py\n+++ b/ibis/expr/operations/relations.py\n@@ -11,7 +11,7 @@ import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute\n from ibis.expr.deferred import Deferred\n from ibis.expr.operations.core import Named, Node, Value\ndiff --git a/ibis/expr/rules.py b/ibis/expr/rules.py\nindex 9b1a3b7..d40700e 100644\n--- a/ibis/expr/rules.py\n+++ b/ibis/expr/rules.py\n@@ -11,7 +11,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute, optional\n from ibis.common.validators import (\n bool_,\ndiff --git a/ibis/expr/timecontext.py b/ibis/expr/timecontext.py\nindex 7ecd8e7..9620d6c 100644\n--- a/ibis/expr/timecontext.py\n+++ b/ibis/expr/timecontext.py\n@@ -38,8 +38,8 @@ from typing import TYPE_CHECKING, Any\n import numpy as np\n \n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.operations as ops\n+from ibis import config\n \n if TYPE_CHECKING:\n import pandas as pd\ndiff --git a/ibis/expr/types/groupby.py b/ibis/expr/types/groupby.py\nindex 138f92e..97aaaa2 100644\n--- a/ibis/expr/types/groupby.py\n+++ b/ibis/expr/types/groupby.py\n@@ -22,7 +22,7 @@ from typing import Iterable, Sequence\n import ibis.expr.analysis as an\n import ibis.expr.types as ir\n import ibis.expr.window as _window\n-import ibis.util as util\n+from ibis import util\n from ibis.expr.deferred import Deferred\n \n _function_types = tuple(\ndiff --git a/ibis/expr/window.py b/ibis/expr/window.py\nindex 5ef3bb1..3e0efdc 100644\n--- a/ibis/expr/window.py\n+++ b/ibis/expr/window.py\n@@ -11,7 +11,7 @@ import 
toolz\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.exceptions import IbisInputError\n from ibis.common.grounds import Comparable\n \ndiff --git a/ibis/tests/expr/test_decimal.py b/ibis/tests/expr/test_decimal.py\nindex 85d8eb2..12b809b 100644\n--- a/ibis/tests/expr/test_decimal.py\n+++ b/ibis/tests/expr/test_decimal.py\n@@ -3,10 +3,10 @@ import operator\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_type_metadata(lineitem):\ndiff --git a/ibis/tests/expr/test_interactive.py b/ibis/tests/expr/test_interactive.py\nindex cea1945..0c5613b 100644\n--- a/ibis/tests/expr/test_interactive.py\n+++ b/ibis/tests/expr/test_interactive.py\n@@ -14,7 +14,7 @@\n \n import pytest\n \n-import ibis.config as config\n+from ibis import config\n from ibis.tests.expr.mocks import MockBackend\n \n \ndiff --git a/ibis/tests/expr/test_table.py b/ibis/tests/expr/test_table.py\nindex 04f4a7d..3f77985 100644\n--- a/ibis/tests/expr/test_table.py\n+++ b/ibis/tests/expr/test_table.py\n@@ -10,13 +10,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as an\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n from ibis import _\n from ibis import literal as L\n from ibis.common.exceptions import RelationError\n+from ibis.expr import api\n from ibis.expr.types import Column, Table\n from ibis.tests.expr.mocks import MockAlchemyBackend, MockBackend\n from ibis.tests.util import assert_equal, assert_pickle_roundtrip\ndiff --git a/ibis/tests/expr/test_temporal.py b/ibis/tests/expr/test_temporal.py\nindex e76e71c..9a0f43f 100644\n--- a/ibis/tests/expr/test_temporal.py\n+++ b/ibis/tests/expr/test_temporal.py\n@@ -5,10 +5,10 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_temporal_literals():\ndiff --git a/ibis/tests/expr/test_timestamp.py b/ibis/tests/expr/test_timestamp.py\nindex 6601c8b..7782787 100644\n--- a/ibis/tests/expr/test_timestamp.py\n+++ b/ibis/tests/expr/test_timestamp.py\n@@ -5,11 +5,11 @@ import pandas as pd\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_field_select(alltypes):\ndiff --git a/ibis/tests/expr/test_value_exprs.py b/ibis/tests/expr/test_value_exprs.py\nindex 4c3d475..9eb247c 100644\n--- a/ibis/tests/expr/test_value_exprs.py\n+++ b/ibis/tests/expr/test_value_exprs.py\n@@ -15,13 +15,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as L\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n from ibis import _, literal\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import api\n from ibis.tests.util import assert_equal\n \n \ndiff --git a/ibis/tests/expr/test_visualize.py 
b/ibis/tests/expr/test_visualize.py\nindex 5525944..253564f 100644\n--- a/ibis/tests/expr/test_visualize.py\n+++ b/ibis/tests/expr/test_visualize.py\n@@ -9,8 +9,8 @@ import ibis.expr.types as ir\n \n pytest.importorskip('graphviz')\n \n-import ibis.expr.api as api # noqa: E402\n import ibis.expr.visualize as viz # noqa: E402\n+from ibis.expr import api # noqa: E402\n \n pytestmark = pytest.mark.skipif(\n int(os.environ.get('CONDA_BUILD', 0)) == 1, reason='CONDA_BUILD defined'\ndiff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 2ad5453..3aa8c3d 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -15,8 +15,8 @@\n import operator\n \n import pytest\n-import sqlalchemy.sql as sql\n from sqlalchemy import func as F\n+from sqlalchemy import sql\n from sqlalchemy import types as sat\n \n import ibis\ndiff --git a/ibis/tests/util.py b/ibis/tests/util.py\nindex f79d09a..025bfc7 100644\n--- a/ibis/tests/util.py\n+++ b/ibis/tests/util.py\n@@ -5,7 +5,7 @@ from __future__ import annotations\n import pickle\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n \n \n def assert_equal(left, right):\ndiff --git a/pyproject.toml b/pyproject.toml\nindex f2146d4..492ad9e 100644\n--- a/pyproject.toml\n+++ b/pyproject.toml\n@@ -310,6 +310,7 @@ select = [\n \"PGH\", # pygrep-hooks\n \"PLC\", # pylint\n \"PLE\", # pylint\n+ \"PLR\", # pylint import style\n \"PLW\", # pylint\n \"RET\", # flake8-return\n \"RUF\", # ruff-specific rules\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex f949506..6a10219 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.22\"\n+version = \"0.1.23-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.10\"\n+version = \"0.6.11-nightly.0\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.10\"\n+version = \"0.6.11-nightly.0\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -126,7 +126,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.10\"\n+version = \"0.6.11-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -134,7 +134,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.10\"\n+version = \"0.6.11-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex 04fdad7..ecc45e5 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.10\"\n+version = \"0.6.11-nightly.0\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full-repl = [\"erg_common/full-repl\"]\n full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.10\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.10\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.10\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.22\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.11-nightly.0\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.11-nightly.0\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = 
\"0.6.11-nightly.0\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.23-nightly.0\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex bc031e6..7c9455f 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.22\"\n+version = \"0.1.23-nightly.0\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n", "diff --git a/src/notebook/epics/kernel-launch.js b/src/notebook/epics/kernel-launch.js\nindex 9075d7c..9f16e67 100644\n--- a/src/notebook/epics/kernel-launch.js\n+++ b/src/notebook/epics/kernel-launch.js\n@@ -113,6 +113,12 @@ export function newKernelObservable(kernelSpec: KernelInfo, cwd: string) {\n observer.error({ type: 'ERROR', payload: error, err: true });\n observer.complete();\n });\n+ spawn.on('exit', () => {\n+ observer.complete();\n+ });\n+ spawn.on('disconnect', () => {\n+ observer.complete();\n+ });\n });\n });\n }\n", "diff --git a/engine.go b/engine.go\nindex 6cc1ff3..4617ceb 100644\n--- a/engine.go\n+++ b/engine.go\n@@ -67,6 +67,9 @@ func (e *engine) renderText(text string) {\n \tprefix := e.activeSegment.getValue(Prefix, \" \")\n \tpostfix := e.activeSegment.getValue(Postfix, \" \")\n \te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"%s%s%s\", prefix, text, postfix))\n+\tif *e.env.getArgs().Debug {\n+\t\te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"(%s:%s)\", e.activeSegment.Type, e.activeSegment.timing))\n+\t}\n }\n \n func (e *engine) renderSegmentText(text string) {\n@@ -107,13 +110,11 @@ func (e *engine) setStringValues(segments []*Segment) {\n \twg.Add(len(segments))\n \tdefer wg.Wait()\n \tcwd := e.env.getcwd()\n+\tdebug := *e.env.getArgs().Debug\n \tfor _, segment := range segments {\n \t\tgo func(s *Segment) {\n \t\t\tdefer wg.Done()\n-\t\t\terr := s.mapSegmentWithWriter(e.env)\n-\t\t\tif err == nil && !s.hasValue(IgnoreFolders, cwd) && s.enabled() {\n-\t\t\t\ts.stringValue = s.string()\n-\t\t\t}\n+\t\t\ts.setStringValue(e.env, cwd, debug)\n \t\t}(segment)\n \t}\n }\ndiff --git a/main.go b/main.go\nindex 56ae8a5..d67a640 100644\n--- a/main.go\n+++ b/main.go\n@@ -14,6 +14,7 @@ type args struct {\n \tConfig *string\n \tShell *string\n \tPWD *string\n+\tDebug *bool\n }\n \n func main() {\n@@ -42,6 +43,10 @@ func main() {\n \t\t\t\"pwd\",\n \t\t\t\"\",\n \t\t\t\"the path you are working in\"),\n+\t\tDebug: flag.Bool(\n+\t\t\t\"debug\",\n+\t\t\tfalse,\n+\t\t\t\"Print debug information\"),\n \t}\n \tflag.Parse()\n \tenv := &environment{\ndiff --git a/packages/powershell/oh-my-posh/oh-my-posh.psm1 b/packages/powershell/oh-my-posh/oh-my-posh.psm1\nindex 9234fc6..1450eb3 100644\n--- a/packages/powershell/oh-my-posh/oh-my-posh.psm1\n+++ b/packages/powershell/oh-my-posh/oh-my-posh.psm1\n@@ -5,6 +5,7 @@\n \n $global:PoshSettings = New-Object -TypeName PSObject -Property @{\n Theme = \"$PSScriptRoot\\themes\\jandedobbeleer.json\";\n+ ShowDebug = $false\n }\n \n function Get-PoshCommand {\n@@ -36,9 +37,14 @@ function Set-PoshPrompt {\n param(\n [Parameter(Mandatory = $false)]\n [string]\n- $Theme\n+ $Theme,\n+ [Parameter(Mandatory = $false)]\n+ [bool]\n+ $ShowDebug = $false\n )\n \n+ $global:PoshSettings.ShowDebug = $ShowDebug\n+\n if (Test-Path \"$PSScriptRoot/themes/$Theme.json\") {\n 
$global:PoshSettings.Theme = \"$PSScriptRoot/themes/$Theme.json\"\n }\n@@ -68,8 +74,9 @@ function Set-PoshPrompt {\n $startInfo = New-Object System.Diagnostics.ProcessStartInfo\n $startInfo.FileName = Get-PoshCommand\n $config = $global:PoshSettings.Theme\n+ $showDebug = $global:PoshSettings.ShowDebug\n $cleanPWD = $PWD.ProviderPath.TrimEnd(\"\\\")\n- $startInfo.Arguments = \"-config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n+ $startInfo.Arguments = \"-debug=\"\"$showDebug\"\" -config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n $startInfo.Environment[\"TERM\"] = \"xterm-256color\"\n $startInfo.CreateNoWindow = $true\n $startInfo.StandardOutputEncoding = [System.Text.Encoding]::UTF8\ndiff --git a/segment.go b/segment.go\nindex 27dd416..4015dac 100644\n--- a/segment.go\n+++ b/segment.go\n@@ -1,6 +1,9 @@\n package main\n \n-import \"errors\"\n+import (\n+\t\"errors\"\n+\t\"time\"\n+)\n \n // Segment represent a single segment and it's configuration\n type Segment struct {\n@@ -17,6 +20,7 @@ type Segment struct {\n \twriter SegmentWriter\n \tstringValue string\n \tactive bool\n+\ttiming time.Duration\n }\n \n // SegmentWriter is the interface used to define what and if to write to the prompt\n@@ -149,3 +153,26 @@ func (segment *Segment) mapSegmentWithWriter(env environmentInfo) error {\n \t}\n \treturn errors.New(\"unable to map writer\")\n }\n+\n+func (segment *Segment) setStringValue(env environmentInfo, cwd string, debug bool) {\n+\terr := segment.mapSegmentWithWriter(env)\n+\tif err != nil || segment.hasValue(IgnoreFolders, cwd) {\n+\t\treturn\n+\t}\n+\t// add timing only in debug\n+\tif debug {\n+\t\tstart := time.Now()\n+\t\tdefer (func() {\n+\t\t\t// force segment rendering to display the time it took\n+\t\t\t// to check if the segment is enabled or not\n+\t\t\t// depending on the segement, calling enabled()\n+\t\t\t// can be time consuming\n+\t\t\tsegment.active = true\n+\t\t\telapsed := time.Since(start)\n+\t\t\tsegment.timing = elapsed\n+\t\t})()\n+\t}\n+\tif segment.enabled() {\n+\t\tsegment.stringValue = segment.string()\n+\t}\n+}\n", "diff --git a/docs/getting-started/getting-started.md b/docs/getting-started/getting-started.md\nindex dc6db37..3ef9d0a 100644\n--- a/docs/getting-started/getting-started.md\n+++ b/docs/getting-started/getting-started.md\n@@ -13,7 +13,7 @@ npm install -g @angular/cli\n **Using `npx`**\n \n ```bash\n-npx create-nx-workspace myworkspace\n+npx --ignore-existing create-nx-workspace myworkspace\n ```\n \n **Using `npm init`**\ndiff --git a/docs/guides/react-and-angular.md b/docs/guides/react-and-angular.md\nindex c1929a2..a5651ff 100644\n--- a/docs/guides/react-and-angular.md\n+++ b/docs/guides/react-and-angular.md\n@@ -11,7 +11,7 @@ To show how Nx does it, let's build two applications (one in Angular, and one in\n Let's start by creating a new Nx workspace. The easiest way to do this is to use npx.\n \n ```bash\n-npx create-nx-workspace happynrwl --preset=empty\n+npx --ignore-existing create-nx-workspace happynrwl --preset=empty\n ```\n \n ## Creating an Angular Application\ndiff --git a/docs/guides/react.md b/docs/guides/react.md\nindex e1647fd..eac848e 100644\n--- a/docs/guides/react.md\n+++ b/docs/guides/react.md\n@@ -16,13 +16,13 @@ Nx has first class support for React: you can create React applications and libr\n Create a new Nx workspace. 
The easiest way to do it is to use npx.\n \n ```bash\n-npx create-nx-workspace happynrwl --preset=empty\n+npx --ignore-existing create-nx-workspace happynrwl --preset=empty\n ```\n \n You can also create a workspace with a React application in place by running:\n \n ```bash\n-npx create-nx-workspace happynrwl --preset=react\n+npx --ignore-existing create-nx-workspace happynrwl --preset=react\n ```\n \n ## Generating a React Application\ndiff --git a/docs/tutorial/01-create-application.md b/docs/tutorial/01-create-application.md\nindex ea87ecf..967a56e 100644\n--- a/docs/tutorial/01-create-application.md\n+++ b/docs/tutorial/01-create-application.md\n@@ -7,7 +7,7 @@ In this tutorial you will use Nx to build a full-stack application out of common\n **Start by creating a new workspace.**\n \n ```bash\n-npx create-nx-workspace myorg\n+npx --ignore-existing create-nx-workspace myorg\n ```\n \n When asked about 'preset', select `empty`.\n"]
5
["8d53d724275ebe4b2a0bb0bd7e2c2dfc399e049b", "607ecc92b5f8c084304e406eec725b7dcfa0a562", "a280a52c8309465276c3509848ddcddbe19732b6", "bea32587586ca08f390c901a95e9b9c25263f4df", "fc9af4d0b93d69be4e201ffb18da04324e8a4a87"]
["refactor", "build", "fix", "feat", "docs"]
add prewatch script to core,extract lambdas,alerts do not trigger modal lifecycle events fixes #8616,await job creation to ensure asserted event sequence,rename step
["diff --git a/.gitignore b/.gitignore\nindex 3445558..72257cb 100644\n--- a/.gitignore\n+++ b/.gitignore\n@@ -1,4 +1,5 @@\n *.o\n+.env\n settheory\n constraint\n Main\ndiff --git a/packages/core/package.json b/packages/core/package.json\nindex 9ba8b93..79bd532 100644\n--- a/packages/core/package.json\n+++ b/packages/core/package.json\n@@ -14,6 +14,7 @@\n \"build:parsers\": \"nearleyc src/parser/Domain.ne > src/parser/DomainParser.ts && nearleyc src/parser/Substance.ne > src/parser/SubstanceParser.ts && nearleyc src/parser/Style.ne > src/parser/StyleParser.ts\",\n \"prebuild\": \"yarn build:parsers\",\n \"prestart\": \"yarn build:parsers\",\n+ \"prewatch\": \"yarn build:parsers\",\n \"test\": \"jest --watchAll=false\",\n \"test:watch\": \"jest --watchAll\",\n \"build\": \"rollup -c\",\n", "diff --git a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\nindex 6ee5797..bcfcc72 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n@@ -224,7 +224,6 @@ public final class AsyncSnapshotDirector extends Actor\n private void takeSnapshot() {\n final var transientSnapshotFuture =\n stateController.takeTransientSnapshot(lowerBoundSnapshotPosition);\n-\n transientSnapshotFuture.onComplete(\n (optionalTransientSnapshot, snapshotTakenError) -> {\n if (snapshotTakenError != null) {\n@@ -237,27 +236,31 @@ public final class AsyncSnapshotDirector extends Actor\n takingSnapshot = false;\n return;\n }\n- pendingSnapshot = optionalTransientSnapshot.get();\n- onRecovered();\n-\n- final ActorFuture<Long> lastWrittenPosition =\n- streamProcessor.getLastWrittenPositionAsync();\n- actor.runOnCompletion(\n- lastWrittenPosition,\n- (endPosition, error) -> {\n- if (error == null) {\n- LOG.info(LOG_MSG_WAIT_UNTIL_COMMITTED, endPosition, commitPosition);\n- lastWrittenEventPosition = endPosition;\n- persistingSnapshot = false;\n- persistSnapshotIfLastWrittenPositionCommitted();\n- } else {\n- resetStateOnFailure();\n- LOG.error(ERROR_MSG_ON_RESOLVE_WRITTEN_POS, error);\n- }\n- });\n+ onTransientSnapshotTaken(optionalTransientSnapshot.get());\n });\n }\n \n+ private void onTransientSnapshotTaken(final TransientSnapshot transientSnapshot) {\n+\n+ pendingSnapshot = transientSnapshot;\n+ onRecovered();\n+\n+ final ActorFuture<Long> lastWrittenPosition = streamProcessor.getLastWrittenPositionAsync();\n+ actor.runOnCompletion(lastWrittenPosition, this::onLastWrittenPositionReceived);\n+ }\n+\n+ private void onLastWrittenPositionReceived(final Long endPosition, final Throwable error) {\n+ if (error == null) {\n+ LOG.info(LOG_MSG_WAIT_UNTIL_COMMITTED, endPosition, commitPosition);\n+ lastWrittenEventPosition = endPosition;\n+ persistingSnapshot = false;\n+ persistSnapshotIfLastWrittenPositionCommitted();\n+ } else {\n+ resetStateOnFailure();\n+ LOG.error(ERROR_MSG_ON_RESOLVE_WRITTEN_POS, error);\n+ }\n+ }\n+\n private void onRecovered() {\n if (healthStatus != HealthStatus.HEALTHY) {\n healthStatus = HealthStatus.HEALTHY;\n", "diff --git a/src/components/app/app-root.ts b/src/components/app/app-root.ts\nindex ec7daee..29dc797 100644\n--- a/src/components/app/app-root.ts\n+++ b/src/components/app/app-root.ts\n@@ -15,6 +15,7 @@ export const AppRootToken = new OpaqueToken('USERROOT');\n selector: 'ion-app',\n template:\n '<div 
#viewport app-viewport></div>' +\n+ '<div #modalPortal overlay-portal></div>' +\n '<div #overlayPortal overlay-portal></div>' +\n '<div #loadingPortal class=\"loading-portal\" overlay-portal></div>' +\n '<div #toastPortal class=\"toast-portal\" overlay-portal></div>' +\n@@ -24,6 +25,8 @@ export class IonicApp extends Ion implements OnInit {\n \n @ViewChild('viewport', {read: ViewContainerRef}) _viewport: ViewContainerRef;\n \n+ @ViewChild('modalPortal', { read: OverlayPortal }) _modalPortal: OverlayPortal;\n+\n @ViewChild('overlayPortal', { read: OverlayPortal }) _overlayPortal: OverlayPortal;\n \n @ViewChild('loadingPortal', { read: OverlayPortal }) _loadingPortal: OverlayPortal;\n@@ -96,6 +99,9 @@ export class IonicApp extends Ion implements OnInit {\n if (portal === AppPortal.TOAST) {\n return this._toastPortal;\n }\n+ if (portal === AppPortal.MODAL) {\n+ return this._modalPortal;\n+ }\n return this._overlayPortal;\n }\n \n@@ -110,6 +116,7 @@ export class IonicApp extends Ion implements OnInit {\n \n export enum AppPortal {\n DEFAULT,\n+ MODAL,\n LOADING,\n TOAST\n };\ndiff --git a/src/components/modal/modal.ts b/src/components/modal/modal.ts\nindex bd4d406..c3e7a62 100644\n--- a/src/components/modal/modal.ts\n+++ b/src/components/modal/modal.ts\n@@ -1,6 +1,7 @@\n import { Injectable } from '@angular/core';\n \n import { App } from '../app/app';\n+import { AppPortal } from '../app/app-root';\n import { isPresent } from '../../util/util';\n import { ModalCmp } from './modal-component';\n import { ModalOptions } from './modal-options';\n@@ -40,7 +41,7 @@ export class Modal extends ViewController {\n * @returns {Promise} Returns a promise which is resolved when the transition has completed.\n */\n present(navOptions: NavOptions = {}) {\n- return this._app.present(this, navOptions);\n+ return this._app.present(this, navOptions, AppPortal.MODAL);\n }\n \n /**\n", "diff --git a/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java b/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\nindex 9ffa1fa..4333db0 100644\n--- a/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\n@@ -114,12 +114,18 @@ public class BoundaryEventTest {\n ENGINE.deployment().withXmlResource(MULTIPLE_SEQUENCE_FLOWS).deploy();\n final long workflowInstanceKey = ENGINE.workflowInstance().ofBpmnProcessId(PROCESS_ID).create();\n \n- // when\n RecordingExporter.timerRecords()\n .withHandlerNodeId(\"timer\")\n .withIntent(TimerIntent.CREATED)\n .withWorkflowInstanceKey(workflowInstanceKey)\n .getFirst();\n+\n+ RecordingExporter.jobRecords(JobIntent.CREATED)\n+ .withType(\"type\")\n+ .withWorkflowInstanceKey(workflowInstanceKey)\n+ .getFirst();\n+\n+ // when\n ENGINE.increaseTime(Duration.ofMinutes(1));\n \n // then\n", "diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex e81d897..5c3ee6b 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -45,7 +45,7 @@ jobs:\n - name: Install dependencies\n run: pnpm install\n \n- - name: Typecheck\n+ - name: Build (stub)\n run: pnpm build:stub\n \n - name: Typecheck\n"]
5
["aa0152baa4376b1087c86499a7c289b668d5ad55", "14abf5c31523a551134aebe9e8f3505ef26ed421", "e2704a4a25b9e348764e1cc922ca7d6a927550eb", "a8d1a60fd48d3fbd76d4271987a1b0f538d498f1", "34875bc0e59b43d9041903101c823d25ec194a21"]
["build", "refactor", "fix", "test", "ci"]
Add the select function for logicflow,Downgrade @azure/* deps for Node.js 10 compatibility,do not check mkdocs for older versions used in deployments,remove broken link Fixes #1785,add activatedElementInstanceKeys to modification record
["diff --git a/packages/core/src/LogicFlow.tsx b/packages/core/src/LogicFlow.tsx\nindex 0d913b7..dcc59b3 100644\n--- a/packages/core/src/LogicFlow.tsx\n+++ b/packages/core/src/LogicFlow.tsx\n@@ -276,6 +276,12 @@ export default class LogicFlow {\n this.translate(-TRANSLATE_X, -TRANSLATE_Y);\n }\n /**\n+ * \u5c06\u56fe\u5f62\u9009\u4e2d\n+ */\n+ select(id: string) {\n+ this.graphModel.selectElementById(id);\n+ }\n+ /**\n * \u5c06\u56fe\u5f62\u5b9a\u4f4d\u5230\u753b\u5e03\u4e2d\u5fc3\n * @param focusOnArgs \u652f\u6301\u7528\u6237\u4f20\u5165\u56fe\u5f62\u5f53\u524d\u7684\u5750\u6807\u6216id\uff0c\u53ef\u4ee5\u901a\u8fc7type\u6765\u533a\u5206\u662f\u8282\u70b9\u8fd8\u662f\u8fde\u7ebf\u7684id\uff0c\u4e5f\u53ef\u4ee5\u4e0d\u4f20\uff08\u515c\u5e95\uff09\n */\ndiff --git a/packages/core/src/model/GraphModel.ts b/packages/core/src/model/GraphModel.ts\nindex 94d0899..10280a9 100644\n--- a/packages/core/src/model/GraphModel.ts\n+++ b/packages/core/src/model/GraphModel.ts\n@@ -481,6 +481,13 @@ class GraphModel {\n this.selectElement?.setSelected(true);\n }\n \n+ @action\n+ selectElementById(id: string) {\n+ this.selectElement?.setSelected(false);\n+ this.selectElement = this.getElement(id) as BaseNodeModel | BaseEdgeModel;\n+ this.selectElement?.setSelected(true);\n+ }\n+\n /* \u4fee\u6539\u8fde\u7ebf\u7c7b\u578b */\n @action\n changeEdgeType(type: string): void {\n", "diff --git a/package.json b/package.json\nindex 911f8cd..ac29f54 100644\n--- a/package.json\n+++ b/package.json\n@@ -79,7 +79,13 @@\n \"resolutions\": {\n \"@types/ramda\": \"0.27.40\",\n \"rc-tree\": \"4.1.5\",\n+ \"@azure/storage-blob\": \"12.7.0\",\n+ \"@azure/core-paging\": \"1.1.3\",\n+ \"@azure/logger\": \"1.0.0\",\n \"@azure/core-auth\": \"1.2.0\",\n+ \"@azure/core-lro\": \"1.0.5\",\n+ \"@azure/core-tracing\": \"1.0.0-preview.10\",\n+ \"@azure/core-http\": \"1.2.6\",\n \"testcontainers\": \"7.12.1\"\n },\n \"license\": \"MIT\"\ndiff --git a/yarn.lock b/yarn.lock\nindex 5019f68..99235b5 100644\n--- a/yarn.lock\n+++ b/yarn.lock\n@@ -1144,19 +1144,19 @@\n \"@azure/abort-controller\" \"^1.0.0\"\n tslib \"^2.0.0\"\n \n-\"@azure/core-http@^2.0.0\":\n- version \"2.2.2\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-http/-/core-http-2.2.2.tgz#573798f087d808d39aa71fd7c52b8d7b89f440da\"\n- integrity sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==\n+\"@azure/[email protected]\", \"@azure/core-http@^1.2.0\", \"@azure/core-http@^2.0.0\":\n+ version \"1.2.6\"\n+ resolved \"https://registry.yarnpkg.com/@azure/core-http/-/core-http-1.2.6.tgz#9cd508418572d2062fd3175274219438772bdb65\"\n+ integrity sha512-odtH7UMKtekc5YQ86xg9GlVHNXR6pq2JgJ5FBo7/jbOjNGdBqcrIVrZx2bevXVJz/uUTSx6vUf62gzTXTfqYSQ==\n dependencies:\n \"@azure/abort-controller\" \"^1.0.0\"\n \"@azure/core-asynciterator-polyfill\" \"^1.0.0\"\n \"@azure/core-auth\" \"^1.3.0\"\n- \"@azure/core-tracing\" \"1.0.0-preview.13\"\n+ \"@azure/core-tracing\" \"1.0.0-preview.11\"\n \"@azure/logger\" \"^1.0.0\"\n \"@types/node-fetch\" \"^2.5.0\"\n- \"@types/tunnel\" \"^0.0.3\"\n- form-data \"^4.0.0\"\n+ \"@types/tunnel\" \"^0.0.1\"\n+ form-data \"^3.0.0\"\n node-fetch \"^2.6.0\"\n process \"^0.11.10\"\n tough-cookie \"^4.0.0\"\n@@ -1165,38 +1165,39 @@\n uuid \"^8.3.0\"\n xml2js \"^0.4.19\"\n \n-\"@azure/core-lro@^2.2.0\":\n- version \"2.2.1\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-lro/-/core-lro-2.2.1.tgz#5527b41037c658d3aefc19d68633e51e53d6e6a3\"\n- integrity 
sha512-HE6PBl+mlKa0eBsLwusHqAqjLc5n9ByxeDo3Hz4kF3B1hqHvRkBr4oMgoT6tX7Hc3q97KfDctDUon7EhvoeHPA==\n+\"@azure/[email protected]\", \"@azure/core-lro@^2.0.0\":\n+ version \"1.0.5\"\n+ resolved \"https://registry.yarnpkg.com/@azure/core-lro/-/core-lro-1.0.5.tgz#856a2cb6a9bec739ee9cde33a27cc28f81ac0522\"\n+ integrity sha512-0EFCFZxARrIoLWMIRt4vuqconRVIO2Iin7nFBfJiYCCbKp5eEmxutNk8uqudPmG0XFl5YqlVh68/al/vbE5OOg==\n dependencies:\n \"@azure/abort-controller\" \"^1.0.0\"\n- \"@azure/core-tracing\" \"1.0.0-preview.13\"\n- \"@azure/logger\" \"^1.0.0\"\n- tslib \"^2.2.0\"\n+ \"@azure/core-http\" \"^1.2.0\"\n+ \"@azure/core-tracing\" \"1.0.0-preview.11\"\n+ events \"^3.0.0\"\n+ tslib \"^2.0.0\"\n \n-\"@azure/core-paging@^1.1.1\":\n- version \"1.2.0\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-paging/-/core-paging-1.2.0.tgz#3754da429e8687bdc3613c750e79a564582e802b\"\n- integrity sha512-ZX1bCjm/MjKPCN6kQD/9GJErYSoKA8YWp6YWoo5EIzcTWlSBLXu3gNaBTUl8usGl+UShiKo7b4Gdy1NSTIlpZg==\n+\"@azure/[email protected]\", \"@azure/core-paging@^1.1.1\":\n+ version \"1.1.3\"\n+ resolved \"https://registry.yarnpkg.com/@azure/core-paging/-/core-paging-1.1.3.tgz#3587c9898a0530cacb64bab216d7318468aa5efc\"\n+ integrity sha512-his7Ah40ThEYORSpIAwuh6B8wkGwO/zG7gqVtmSE4WAJ46e36zUDXTKReUCLBDc6HmjjApQQxxcRFy5FruG79A==\n dependencies:\n \"@azure/core-asynciterator-polyfill\" \"^1.0.0\"\n- tslib \"^2.2.0\"\n \n-\"@azure/[email protected]\":\n- version \"1.0.0-preview.13\"\n- resolved \"https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz#55883d40ae2042f6f1e12b17dd0c0d34c536d644\"\n- integrity sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==\n+\"@azure/[email protected]\", \"@azure/[email protected]\", \"@azure/[email protected]\":\n+ version \"1.0.0-preview.10\"\n+ resolved \"https://registry.yarnpkg.com/@azure/core-tracing/-/core-tracing-1.0.0-preview.10.tgz#e7060272145dddad4486765030d1b037cd52a8ea\"\n+ integrity sha512-iIwjtMwQnsxB7cYkugMx+s4W1nfy3+pT/ceo+uW1fv4YDgYe84nh+QP0fEC9IH/3UATLSWbIBemdMHzk2APUrw==\n dependencies:\n- \"@opentelemetry/api\" \"^1.0.1\"\n- tslib \"^2.2.0\"\n+ \"@opencensus/web-types\" \"0.0.7\"\n+ \"@opentelemetry/api\" \"^0.10.2\"\n+ tslib \"^2.0.0\"\n \n-\"@azure/logger@^1.0.0\":\n- version \"1.0.3\"\n- resolved \"https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.3.tgz#6e36704aa51be7d4a1bae24731ea580836293c96\"\n- integrity sha512-aK4s3Xxjrx3daZr3VylxejK3vG5ExXck5WOHDJ8in/k9AqlfIyFMMT1uG7u8mNjX+QRILTIn0/Xgschfh/dQ9g==\n+\"@azure/[email protected]\", \"@azure/logger@^1.0.0\":\n+ version \"1.0.0\"\n+ resolved \"https://registry.yarnpkg.com/@azure/logger/-/logger-1.0.0.tgz#48b371dfb34288c8797e5c104f6c4fb45bf1772c\"\n+ integrity sha512-g2qLDgvmhyIxR3JVS8N67CyIOeFRKQlX/llxYJQr1OSGQqM3HTpVP8MjmjcEKbL/OIt2N9C9UFaNQuKOw1laOA==\n dependencies:\n- tslib \"^2.2.0\"\n+ tslib \"^1.9.3\"\n \n \"@azure/ms-rest-azure-env@^2.0.0\":\n version \"2.0.0\"\n@@ -1227,19 +1228,19 @@\n \"@azure/ms-rest-js\" \"^2.0.4\"\n adal-node \"^0.2.2\"\n \n-\"@azure/storage-blob@^12.5.0\":\n- version \"12.8.0\"\n- resolved \"https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.8.0.tgz#97b7ecc6c7b17bcbaf0281c79c16af6f512d6130\"\n- integrity sha512-c8+Wz19xauW0bGkTCoqZH4dYfbtBniPiGiRQOn1ca6G5jsjr4azwaTk9gwjVY8r3vY2Taf95eivLzipfIfiS4A==\n+\"@azure/[email protected]\", \"@azure/storage-blob@^12.5.0\":\n+ version \"12.7.0\"\n+ resolved 
\"https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.7.0.tgz#f17f278000a46bca516e5864d846cd8fa57d6d7d\"\n+ integrity sha512-7YEWEx03Us/YBxthzBv788R7jokwpCD5KcIsvtE5xRaijNX9o80KXpabhEwLR9DD9nmt/AlU/c1R+aXydgCduQ==\n dependencies:\n \"@azure/abort-controller\" \"^1.0.0\"\n \"@azure/core-http\" \"^2.0.0\"\n- \"@azure/core-lro\" \"^2.2.0\"\n+ \"@azure/core-lro\" \"^2.0.0\"\n \"@azure/core-paging\" \"^1.1.1\"\n \"@azure/core-tracing\" \"1.0.0-preview.13\"\n \"@azure/logger\" \"^1.0.0\"\n events \"^3.0.0\"\n- tslib \"^2.2.0\"\n+ tslib \"^2.0.0\"\n \n \"@babel/cli@^7.5.5\":\n version \"7.16.0\"\n@@ -2888,9 +2889,9 @@\n integrity sha512-82cpyJyKRoQoRi+14ibCeGPu0CwypgtBAdBhq1WfvagpCZNKqwXbKwXllYSMG91DhmG4jt9gN8eP6lGOtozuaw==\n \n \"@google-cloud/bigquery@^5.6.0\":\n- version \"5.9.1\"\n- resolved \"https://registry.yarnpkg.com/@google-cloud/bigquery/-/bigquery-5.9.1.tgz#96cee86fa0caef4a7e1470efde9295bc09f5981f\"\n- integrity sha512-80pMzhAC299CSiXW9TvR8AARLaPRDeQg8pSAvrVcLXcUkx1hWvVx2m94nBZ4KUoZb4LVWIHHYhvFB6XvIcxqjw==\n+ version \"5.9.2\"\n+ resolved \"https://registry.yarnpkg.com/@google-cloud/bigquery/-/bigquery-5.9.2.tgz#d53eac984fdd256d31be490762157e5f6c5b82c3\"\n+ integrity sha512-lJiMsSekcnhrzzR9e48yx8iOx+ElP3r/wOoionXL6eDPbA41RgP12if5NmMqHZzfWdKlWV2plspEPrbjhJAzCw==\n dependencies:\n \"@google-cloud/common\" \"^3.1.0\"\n \"@google-cloud/paginator\" \"^3.0.0\"\n@@ -4831,11 +4832,28 @@\n resolved \"https://registry.yarnpkg.com/@oozcitak/util/-/util-8.3.8.tgz#10f65fe1891fd8cde4957360835e78fd1936bfdd\"\n integrity sha512-T8TbSnGsxo6TDBJx/Sgv/BlVJL3tshxZP7Aq5R1mSnM5OcHY2dQaxLMu2+E8u3gN0MLOzdjurqN4ZRVuzQycOQ==\n \n-\"@opentelemetry/api@^1.0.0\", \"@opentelemetry/api@^1.0.1\":\n+\"@opencensus/[email protected]\":\n+ version \"0.0.7\"\n+ resolved \"https://registry.yarnpkg.com/@opencensus/web-types/-/web-types-0.0.7.tgz#4426de1fe5aa8f624db395d2152b902874f0570a\"\n+ integrity sha512-xB+w7ZDAu3YBzqH44rCmG9/RlrOmFuDPt/bpf17eJr8eZSrLt7nc7LnWdxM9Mmoj/YKMHpxRg28txu3TcpiL+g==\n+\n+\"@opentelemetry/api@^0.10.2\":\n+ version \"0.10.2\"\n+ resolved \"https://registry.yarnpkg.com/@opentelemetry/api/-/api-0.10.2.tgz#9647b881f3e1654089ff7ea59d587b2d35060654\"\n+ integrity sha512-GtpMGd6vkzDMYcpu2t9LlhEgMy/SzBwRnz48EejlRArYqZzqSzAsKmegUK7zHgl+EOIaK9mKHhnRaQu3qw20cA==\n+ dependencies:\n+ \"@opentelemetry/context-base\" \"^0.10.2\"\n+\n+\"@opentelemetry/api@^1.0.0\":\n version \"1.0.3\"\n resolved \"https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.0.3.tgz#13a12ae9e05c2a782f7b5e84c3cbfda4225eaf80\"\n integrity sha512-puWxACExDe9nxbBB3lOymQFrLYml2dVOrd7USiVRnSbgXE+KwBu+HxFvxrzfqsiSda9IWsXJG1ef7C1O2/GmKQ==\n \n+\"@opentelemetry/context-base@^0.10.2\":\n+ version \"0.10.2\"\n+ resolved \"https://registry.yarnpkg.com/@opentelemetry/context-base/-/context-base-0.10.2.tgz#55bea904b2b91aa8a8675df9eaba5961bddb1def\"\n+ integrity sha512-hZNKjKOYsckoOEgBziGMnBcX0M7EtstnCmwz5jZUOUYwlZ+/xxX6z3jPu1XVO2Jivk0eLfuP9GP+vFD49CMetw==\n+\n \"@opentelemetry/semantic-conventions@^0.24.0\":\n version \"0.24.0\"\n resolved \"https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-0.24.0.tgz#1028ef0e0923b24916158d80d2ddfd67ea8b6740\"\n@@ -5564,9 +5582,9 @@\n integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4=\n \n \"@types/jsonwebtoken@^8.5.0\":\n- version \"8.5.5\"\n- resolved \"https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-8.5.5.tgz#da5f2f4baee88f052ef3e4db4c1a0afb46cff22c\"\n- integrity 
sha512-OGqtHQ7N5/Ap/TUwO6IgHDuLiAoTmHhGpNvgkCm/F4N6pKzx/RBSfr2OXZSwC6vkfnsEdb6+7DNZVtiXiwdwFw==\n+ version \"8.5.6\"\n+ resolved \"https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-8.5.6.tgz#1913e5a61e70a192c5a444623da4901a7b1a9d42\"\n+ integrity sha512-+P3O/xC7nzVizIi5VbF34YtqSonFsdnbXBnWUCYRiKOi1f9gA4sEFvXkrGr/QVV23IbMYvcoerI7nnhDUiWXRQ==\n dependencies:\n \"@types/node\" \"*\"\n \n@@ -5753,18 +5771,18 @@\n \"@types/react\" \"*\"\n \n \"@types/react@*\", \"@types/react@^17.0.3\":\n- version \"17.0.34\"\n- resolved \"https://registry.yarnpkg.com/@types/react/-/react-17.0.34.tgz#797b66d359b692e3f19991b6b07e4b0c706c0102\"\n- integrity sha512-46FEGrMjc2+8XhHXILr+3+/sTe3OfzSPU9YGKILLrUYbQ1CLQC9Daqo1KzENGXAWwrFwiY0l4ZbF20gRvgpWTg==\n+ version \"17.0.35\"\n+ resolved \"https://registry.yarnpkg.com/@types/react/-/react-17.0.35.tgz#217164cf830267d56cd1aec09dcf25a541eedd4c\"\n+ integrity sha512-r3C8/TJuri/SLZiiwwxQoLAoavaczARfT9up9b4Jr65+ErAUX3MIkU0oMOQnrpfgHme8zIqZLX7O5nnjm5Wayw==\n dependencies:\n \"@types/prop-types\" \"*\"\n \"@types/scheduler\" \"*\"\n csstype \"^3.0.2\"\n \n \"@types/react@^16.9.41\":\n- version \"16.14.20\"\n- resolved \"https://registry.yarnpkg.com/@types/react/-/react-16.14.20.tgz#ff6e932ad71d92c27590e4a8667c7a53a7d0baad\"\n- integrity sha512-SV7TaVc8e9E/5Xuv6TIyJ5VhQpZoVFJqX6IZgj5HZoFCtIDCArE3qXkcHlc6O/Ud4UwcMoX+tlvDA95YrKdLgA==\n+ version \"16.14.21\"\n+ resolved \"https://registry.yarnpkg.com/@types/react/-/react-16.14.21.tgz#35199b21a278355ec7a3c40003bd6a334bd4ae4a\"\n+ integrity sha512-rY4DzPKK/4aohyWiDRHS2fotN5rhBSK6/rz1X37KzNna9HJyqtaGAbq9fVttrEPWF5ywpfIP1ITL8Xi2QZn6Eg==\n dependencies:\n \"@types/prop-types\" \"*\"\n \"@types/scheduler\" \"*\"\n@@ -5950,10 +5968,10 @@\n resolved \"https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.1.tgz#8f80dd965ad81f3e1bc26d6f5c727e132721ff40\"\n integrity sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg==\n \n-\"@types/tunnel@^0.0.3\":\n- version \"0.0.3\"\n- resolved \"https://registry.yarnpkg.com/@types/tunnel/-/tunnel-0.0.3.tgz#f109e730b072b3136347561fc558c9358bb8c6e9\"\n- integrity sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==\n+\"@types/tunnel@^0.0.1\":\n+ version \"0.0.1\"\n+ resolved \"https://registry.yarnpkg.com/@types/tunnel/-/tunnel-0.0.1.tgz#0d72774768b73df26f25df9184273a42da72b19c\"\n+ integrity sha512-AOqu6bQu5MSWwYvehMXLukFHnupHrpZ8nvgae5Ggie9UwzDR1CCwoXgSSWNZJuyOlCdfdsWMA5F2LlmvyoTv8A==\n dependencies:\n \"@types/node\" \"*\"\n \n@@ -5999,9 +6017,9 @@\n source-map \"^0.6.1\"\n \n \"@types/webpack@^4\", \"@types/webpack@^4.0.0\", \"@types/webpack@^4.41.8\":\n- version \"4.41.31\"\n- resolved \"https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.31.tgz#c35f252a3559ddf9c85c0d8b0b42019025e581aa\"\n- integrity sha512-/i0J7sepXFIp1ZT7FjUGi1eXMCg8HCCzLJEQkKsOtbJFontsJLolBcDC+3qxn5pPwiCt1G0ZdRmYRzNBtvpuGQ==\n+ version \"4.41.32\"\n+ resolved \"https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.32.tgz#a7bab03b72904070162b2f169415492209e94212\"\n+ integrity sha512-cb+0ioil/7oz5//7tZUSwbrSAN/NWHrQylz5cW8G0dWTcF/g+/dSdMlKVZspBYuMAN1+WnwHrkxiRrLcwd0Heg==\n dependencies:\n \"@types/node\" \"*\"\n \"@types/tapable\" \"^1\"\n@@ -7624,9 +7642,9 @@ autoprefixer@^9.6.1, autoprefixer@^9.6.5, autoprefixer@^9.8.6:\n postcss-value-parser \"^4.1.0\"\n \n aws-sdk@^2.404.0, aws-sdk@^2.787.0, aws-sdk@^2.819.0, aws-sdk@^2.878.0:\n- version \"2.1028.0\"\n- resolved 
\"https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1028.0.tgz#ce076076174afa9bd311406b8186ea90163e3331\"\n- integrity sha512-OmR0NcpU8zsDcUOZhM+eZ6CzlUFtuaEuRyjm6mxDO0KI7lJAp7/NzB6tcellRrgWxL+NO7b5TSxi+m28qu5ocQ==\n+ version \"2.1029.0\"\n+ resolved \"https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1029.0.tgz#702d4d6092adcf0ceaf37ae0da6fee07a71f39dd\"\n+ integrity sha512-nCmaMPkJr3EATXaeqR3JeNC0GTDH2lJZ3Xq/ZCAW+yrfaPQWv8HqJJHBCNGtmk3FmcCoxc7ed/gEB8XSl0tocA==\n dependencies:\n buffer \"4.9.2\"\n events \"1.1.1\"\n@@ -8596,11 +8614,16 @@ [email protected]:\n resolved \"https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048\"\n integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=\n \[email protected], bytes@^3.1.0:\[email protected]:\n version \"3.1.0\"\n resolved \"https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6\"\n integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==\n \n+bytes@^3.1.0:\n+ version \"3.1.1\"\n+ resolved \"https://registry.yarnpkg.com/bytes/-/bytes-3.1.1.tgz#3f018291cb4cbad9accb6e6970bca9c8889e879a\"\n+ integrity sha512-dWe4nWO/ruEOY7HkUJ5gFt1DCFV9zPRoJr8pV0/ASQermOZjtq8jMjOprC0Kd10GLN+l7xaUPvxzJFWtxGu8Fg==\n+\n [email protected]:\n version \"15.0.3\"\n resolved \"https://registry.yarnpkg.com/cacache/-/cacache-15.0.3.tgz#2225c2d1dd8e872339950d6a39c051e0e9334392\"\n@@ -11359,9 +11382,9 @@ ejs@^2.6.1:\n integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==\n \n electron-to-chromium@^1.3.564, electron-to-chromium@^1.3.896:\n- version \"1.3.896\"\n- resolved \"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.896.tgz#4a94efe4870b1687eafd5c378198a49da06e8a1b\"\n- integrity sha512-NcGkBVXePiuUrPLV8IxP43n1EOtdg+dudVjrfVEUd/bOqpQUFZ2diL5PPYzbgEhZFEltdXV3AcyKwGnEQ5lhMA==\n+ version \"1.3.899\"\n+ resolved \"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.899.tgz#4d7d040e73def3d5f5bd6b8a21049025dce6fce0\"\n+ integrity sha512-w16Dtd2zl7VZ4N4Db+FIa7n36sgPGCKjrKvUUmp5ialsikvcQLjcJR9RWnlYNxIyEHLdHaoIZEqKsPxU9MdyBg==\n \n elegant-spinner@^1.0.1:\n version \"1.0.1\"\n@@ -12887,15 +12910,6 @@ form-data@^3.0.0:\n combined-stream \"^1.0.8\"\n mime-types \"^2.1.12\"\n \n-form-data@^4.0.0:\n- version \"4.0.0\"\n- resolved \"https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452\"\n- integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==\n- dependencies:\n- asynckit \"^0.4.0\"\n- combined-stream \"^1.0.8\"\n- mime-types \"^2.1.12\"\n-\n form-data@~2.3.2:\n version \"2.3.3\"\n resolved \"https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6\"\n@@ -21198,11 +21212,13 @@ proto-list@~1.2.1:\n integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk=\n \n proto3-json-serializer@^0.1.5:\n- version \"0.1.5\"\n- resolved \"https://registry.yarnpkg.com/proto3-json-serializer/-/proto3-json-serializer-0.1.5.tgz#c619769a59dc7fd8adf4e6c5060b9bf3039c8304\"\n- integrity sha512-G395jcZkgNXNeS+6FGqd09TsXeoCs9wmBWByDiwFy7Yd7HD8pyfyvf6q+rGh7PhT4AshRpG4NowzoKYUtkNjKg==\n+ version \"0.1.6\"\n+ resolved \"https://registry.yarnpkg.com/proto3-json-serializer/-/proto3-json-serializer-0.1.6.tgz#67cf3b8d5f4c8bebfc410698ad3b1ed64da39c7b\"\n+ integrity sha512-tGbV6m6Kad8NqxMh5hw87euPS0YoZSAOIfvR01zYkQV8Gpx1V/8yU/0gCKCvfCkhAJsjvzzhnnsdQxA1w7PSog==\n+ 
dependencies:\n+ protobufjs \"^6.11.2\"\n \[email protected], protobufjs@^6.10.0:\[email protected], protobufjs@^6.10.0, protobufjs@^6.11.2:\n version \"6.11.2\"\n resolved \"https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.2.tgz#de39fabd4ed32beaa08e9bb1e30d08544c1edf8b\"\n integrity sha512-4BQJoPooKJl2G9j3XftkIXjoC9C0Av2NOrWmbLWT1vH32GcSUHjM0Arra6UfTsVyfMAuFzaLucXn1sadxJydAw==\n", "diff --git a/.github/workflows/ibis-docs-lint.yml b/.github/workflows/ibis-docs-lint.yml\nindex 57d94a4..04de03b 100644\n--- a/.github/workflows/ibis-docs-lint.yml\n+++ b/.github/workflows/ibis-docs-lint.yml\n@@ -206,7 +206,7 @@ jobs:\n - name: build and push dev docs\n run: |\n nix develop --ignore-environment -c \\\n- mkdocs gh-deploy --message 'docs: ibis@${{ github.sha }}'\n+ mkdocs gh-deploy --message 'docs: ibis@${{ github.sha }}' --ignore-version\n \n simulate_release:\n runs-on: ubuntu-latest\n", "diff --git a/docs/content/Caching/Caching.md b/docs/content/Caching/Caching.md\nindex d873a52..9706dda 100644\n--- a/docs/content/Caching/Caching.md\n+++ b/docs/content/Caching/Caching.md\n@@ -135,8 +135,9 @@ If nothing is found in the cache, the query is executed in the database and the \n is returned as well as updating the cache.\n \n If an existing value is present in the cache and the `refreshKey` value for\n-the query hasn't changed, the cached value will be returned. Otherwise, a\n-[query renewal](#in-memory-cache-force-query-renewal) will be performed.\n+the query hasn't changed, the cached value will be returned. Otherwise, a SQL query will be executed either against the pre-aggregations storage or the source database to populate the cache with the results and return them.\n+\n+\n \n ### Refresh Keys\n \n", "diff --git a/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java b/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java\nindex 33410da..edd0588 100644\n--- a/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java\n+++ b/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java\n@@ -787,7 +787,8 @@ final class JsonSerializableToJsonTest {\n }\n }],\n \"elementId\": \"activity\"\n- }]\n+ }],\n+ \"activatedElementInstanceKeys\": []\n }\n \"\"\"\n },\n@@ -803,7 +804,8 @@ final class JsonSerializableToJsonTest {\n {\n \"processInstanceKey\": 1,\n \"terminateInstructions\": [],\n- \"activateInstructions\": []\n+ \"activateInstructions\": [],\n+ \"activatedElementInstanceKeys\": []\n }\n \"\"\"\n },\n"]
5
["6ae067153cd2608018fd3da76bd6d00a08da4b3a", "5ef4fd29a4cef69c6c348dd25156934df041f183", "21228c55b7045d9b2225f65e6231184ff332b071", "c351088bce98594c740a39546ce3655c91554a5d", "f7cc7b263afeb27eef393b7497db8dad8ebb0518"]
["feat", "build", "ci", "docs", "test"]
add test for spurious cross join,add flag to wait for workflow instance result - with the flag withResult the create instance command will wait for the workflow to complete - optionally, a list of variable names can be specified to limit the fetched variables,add missing region to cloudformation_stack_set,fix readme,do not query all networks
["diff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 4ad32a6..b2e5d72 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -841,3 +841,63 @@ def test_filter_group_by_agg_with_same_name():\n )\n ex = sa.select([t0]).where(t0.c.bigint_col == 60)\n _check(expr, ex)\n+\n+\[email protected]\n+def person():\n+ return ibis.table(\n+ dict(id=\"string\", personal=\"string\", family=\"string\"),\n+ name=\"person\",\n+ )\n+\n+\[email protected]\n+def visited():\n+ return ibis.table(\n+ dict(id=\"int32\", site=\"string\", dated=\"string\"),\n+ name=\"visited\",\n+ )\n+\n+\[email protected]\n+def survey():\n+ return ibis.table(\n+ dict(\n+ taken=\"int32\",\n+ person=\"string\",\n+ quant=\"string\",\n+ reading=\"float32\",\n+ ),\n+ name=\"survey\",\n+ )\n+\n+\n+def test_no_cross_join(person, visited, survey):\n+ expr = person.join(survey, person.id == survey.person).join(\n+ visited,\n+ visited.id == survey.taken,\n+ )\n+\n+ context = AlchemyContext(compiler=AlchemyCompiler)\n+ _ = AlchemyCompiler.to_sql(expr, context)\n+\n+ t0 = context.get_ref(person)\n+ t1 = context.get_ref(survey)\n+ t2 = context.get_ref(visited)\n+\n+ from_ = t0.join(t1, t0.c.id == t1.c.person).join(t2, t2.c.id == t1.c.taken)\n+ ex = sa.select(\n+ [\n+ t0.c.id.label(\"id_x\"),\n+ t0.c.personal,\n+ t0.c.family,\n+ t1.c.taken,\n+ t1.c.person,\n+ t1.c.quant,\n+ t1.c.reading,\n+ t2.c.id.label(\"id_y\"),\n+ t2.c.site,\n+ t2.c.dated,\n+ ]\n+ ).select_from(from_)\n+ _check(expr, ex)\n", "diff --git a/clients/zbctl/cmd/createInstance.go b/clients/zbctl/cmd/createInstance.go\nindex 016f115..85ac0be 100644\n--- a/clients/zbctl/cmd/createInstance.go\n+++ b/clients/zbctl/cmd/createInstance.go\n@@ -15,13 +15,15 @@ package cmd\n \n import (\n \t\"github.com/zeebe-io/zeebe/clients/go/commands\"\n+\t\"strings\"\n \n \t\"github.com/spf13/cobra\"\n )\n \n var (\n-\tcreateInstanceVersionFlag int32\n-\tcreateInstanceVariablesFlag string\n+\tcreateInstanceVersionFlag int32\n+\tcreateInstanceVariablesFlag string\n+\tcreateInstanceWithResultFlag []string\n )\n \n var createInstanceCmd = &cobra.Command{\n@@ -39,12 +41,29 @@ var createInstanceCmd = &cobra.Command{\n \t\t\treturn err\n \t\t}\n \n-\t\tresponse, err := zbCmd.Send()\n-\t\tif err != nil {\n-\t\t\treturn err\n-\t\t}\n+\t\tif createInstanceWithResultFlag == nil {\n+\t\t\tresponse, err := zbCmd.Send()\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\n+\t\t\treturn printJson(response)\n+\t\t} else {\n+\t\t\tvariableNames := []string{}\n+\t\t\tfor _, variableName := range createInstanceWithResultFlag {\n+\t\t\t\ttrimedVariableName := strings.TrimSpace(variableName)\n+\t\t\t\tif trimedVariableName != \"\" {\n+\t\t\t\t\tvariableNames = append(variableNames, trimedVariableName)\n+\t\t\t\t}\n+\t\t\t}\n+\t\t\tresponse, err := zbCmd.WithResult().FetchVariables(variableNames...).Send()\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\n+\t\t\treturn printJson(response)\n \n-\t\treturn printJson(response)\n+\t\t}\n \t},\n }\n \n@@ -58,4 +77,11 @@ func init() {\n \tcreateInstanceCmd.\n \t\tFlags().\n \t\tInt32Var(&createInstanceVersionFlag, \"version\", commands.LatestVersion, \"Specify version of workflow which should be executed.\")\n+\n+\tcreateInstanceCmd.\n+\t\tFlags().\n+\t\tStringSliceVar(&createInstanceWithResultFlag, \"withResult\", nil, \"Specify to await result of workflow, optional a list of variable names can be provided to limit the returned variables\")\n+\n+\t// hack to use --withResult without 
values\n+\tcreateInstanceCmd.Flag(\"withResult\").NoOptDefVal = \" \"\n }\n", "diff --git a/internal/providers/terraform/aws/cloudformation_stack_set.go b/internal/providers/terraform/aws/cloudformation_stack_set.go\nindex 6720caa..e752b79 100644\n--- a/internal/providers/terraform/aws/cloudformation_stack_set.go\n+++ b/internal/providers/terraform/aws/cloudformation_stack_set.go\n@@ -12,7 +12,7 @@ func getCloudFormationStackSetRegistryItem() *schema.RegistryItem {\n \t}\n }\n func NewCloudformationStackSet(d *schema.ResourceData, u *schema.UsageData) *schema.Resource {\n-\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address)}\n+\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address), Region: strPtr(d.Get(\"region\").String())}\n \tif !d.IsEmpty(\"template_body\") {\n \t\tr.TemplateBody = strPtr(d.Get(\"template_body\").String())\n \t}\n", "diff --git a/crates/dagger-sdk/README.md b/crates/dagger-sdk/README.md\nindex ed96be1..974fb7f 100644\n--- a/crates/dagger-sdk/README.md\n+++ b/crates/dagger-sdk/README.md\n@@ -29,9 +29,9 @@ fn main() -> eyre::Result<()> {\n let client = dagger_sdk::connect()?;\n \n let version = client\n- .container(None)\n- .from(\"golang:1.19\".into())\n- .with_exec(vec![\"go\".into(), \"version\".into()], None)\n+ .container()\n+ .from(\"golang:1.19\")\n+ .with_exec(vec![\"go\", \"version\"])\n .stdout()?;\n \n println!(\"Hello from Dagger and {}\", version.trim());\n", "diff --git a/src/environment/windows_win32.go b/src/environment/windows_win32.go\nindex be0c7b5..b90e0ff 100644\n--- a/src/environment/windows_win32.go\n+++ b/src/environment/windows_win32.go\n@@ -203,7 +203,6 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \tvar pIFTable2 *MIN_IF_TABLE2\n \t_, _, _ = hGetIfTable2.Call(uintptr(unsafe.Pointer(&pIFTable2)))\n \n-\tSSIDs, _ := env.getAllWifiSSID()\n \tnetworks := make([]*Connection, 0)\n \n \tfor i := 0; i < int(pIFTable2.NumEntries); i++ {\n@@ -220,11 +219,13 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \t\t}\n \n \t\tvar connectionType ConnectionType\n+\t\tvar ssid string\n \t\tswitch networkInterface.Type {\n \t\tcase 6:\n \t\t\tconnectionType = ETHERNET\n \t\tcase 71:\n \t\t\tconnectionType = WIFI\n+\t\t\tssid = env.getWiFiSSID(networkInterface.InterfaceGUID)\n \t\tcase 237, 234, 244:\n \t\t\tconnectionType = CELLULAR\n \t\t}\n@@ -243,10 +244,7 @@ func (env *ShellEnvironment) getConnections() []*Connection {\n \t\t\tName: description, // we want a relatable name, alias isn't that\n \t\t\tTransmitRate: networkInterface.TransmitLinkSpeed,\n \t\t\tReceiveRate: networkInterface.ReceiveLinkSpeed,\n-\t\t}\n-\n-\t\tif SSID, OK := SSIDs[network.Name]; OK {\n-\t\t\tnetwork.SSID = SSID\n+\t\t\tSSID: ssid,\n \t\t}\n \n \t\tnetworks = append(networks, network)\n@@ -322,13 +320,21 @@ type MIB_IF_ROW2 struct { //nolint: revive\n \tOutQLen uint64\n }\n \n-func (env *ShellEnvironment) getAllWifiSSID() (map[string]string, error) {\n+var (\n+\twlanapi = syscall.NewLazyDLL(\"wlanapi.dll\")\n+\thWlanOpenHandle = wlanapi.NewProc(\"WlanOpenHandle\")\n+\thWlanCloseHandle = wlanapi.NewProc(\"WlanCloseHandle\")\n+\thWlanQueryInterface = wlanapi.NewProc(\"WlanQueryInterface\")\n+)\n+\n+func (env *ShellEnvironment) getWiFiSSID(guid windows.GUID) string {\n+\t// Query wifi connection state\n \tvar pdwNegotiatedVersion uint32\n \tvar phClientHandle uint32\n \te, _, err := hWlanOpenHandle.Call(uintptr(uint32(2)), uintptr(unsafe.Pointer(nil)), uintptr(unsafe.Pointer(&pdwNegotiatedVersion)), 
uintptr(unsafe.Pointer(&phClientHandle)))\n \tif e != 0 {\n \t\tenv.Log(Error, \"getAllWifiSSID\", err.Error())\n-\t\treturn nil, err\n+\t\treturn \"\"\n \t}\n \n \t// defer closing handle\n@@ -336,42 +342,11 @@ func (env *ShellEnvironment) getAllWifiSSID() (map[string]string, error) {\n \t\t_, _, _ = hWlanCloseHandle.Call(uintptr(phClientHandle), uintptr(unsafe.Pointer(nil)))\n \t}()\n \n-\tssid := make(map[string]string)\n-\t// list interfaces\n-\tvar interfaceList *WLAN_INTERFACE_INFO_LIST\n-\te, _, err = hWlanEnumInterfaces.Call(uintptr(phClientHandle), uintptr(unsafe.Pointer(nil)), uintptr(unsafe.Pointer(&interfaceList)))\n-\tif e != 0 {\n-\t\tenv.Log(Error, \"getAllWifiSSID\", err.Error())\n-\t\treturn nil, err\n-\t}\n-\n-\t// use first interface that is connected\n-\tnumberOfInterfaces := int(interfaceList.dwNumberOfItems)\n-\tinfoSize := unsafe.Sizeof(interfaceList.InterfaceInfo[0])\n-\tfor i := 0; i < numberOfInterfaces; i++ {\n-\t\tnetwork := (*WLAN_INTERFACE_INFO)(unsafe.Pointer(uintptr(unsafe.Pointer(&interfaceList.InterfaceInfo[0])) + uintptr(i)*infoSize))\n-\t\tif network.isState == 1 {\n-\t\t\twifiInterface := strings.TrimRight(string(utf16.Decode(network.strInterfaceDescription[:])), \"\\x00\")\n-\t\t\tssid[wifiInterface] = env.getWiFiSSID(network, phClientHandle)\n-\t\t}\n-\t}\n-\treturn ssid, nil\n-}\n-\n-var (\n-\twlanapi = syscall.NewLazyDLL(\"wlanapi.dll\")\n-\thWlanOpenHandle = wlanapi.NewProc(\"WlanOpenHandle\")\n-\thWlanCloseHandle = wlanapi.NewProc(\"WlanCloseHandle\")\n-\thWlanEnumInterfaces = wlanapi.NewProc(\"WlanEnumInterfaces\")\n-\thWlanQueryInterface = wlanapi.NewProc(\"WlanQueryInterface\")\n-)\n-\n-func (env *ShellEnvironment) getWiFiSSID(network *WLAN_INTERFACE_INFO, clientHandle uint32) string {\n-\t// Query wifi connection state\n \tvar dataSize uint16\n \tvar wlanAttr *WLAN_CONNECTION_ATTRIBUTES\n-\te, _, _ := hWlanQueryInterface.Call(uintptr(clientHandle),\n-\t\tuintptr(unsafe.Pointer(&network.InterfaceGuid)),\n+\n+\te, _, _ = hWlanQueryInterface.Call(uintptr(phClientHandle),\n+\t\tuintptr(unsafe.Pointer(&guid)),\n \t\tuintptr(7), // wlan_intf_opcode_current_connection\n \t\tuintptr(unsafe.Pointer(nil)),\n \t\tuintptr(unsafe.Pointer(&dataSize)),\n@@ -389,18 +364,6 @@ func (env *ShellEnvironment) getWiFiSSID(network *WLAN_INTERFACE_INFO, clientHan\n \treturn string(ssid.ucSSID[0:ssid.uSSIDLength])\n }\n \n-type WLAN_INTERFACE_INFO_LIST struct { //nolint: revive\n-\tdwNumberOfItems uint32\n-\tdwIndex uint32 //nolint: unused\n-\tInterfaceInfo [256]WLAN_INTERFACE_INFO\n-}\n-\n-type WLAN_INTERFACE_INFO struct { //nolint: revive\n-\tInterfaceGuid syscall.GUID //nolint: revive\n-\tstrInterfaceDescription [256]uint16\n-\tisState uint32\n-}\n-\n type WLAN_CONNECTION_ATTRIBUTES struct { //nolint: revive\n \tisState uint32 //nolint: unused\n \twlanConnectionMode uint32 //nolint: unused\n"]
5
["8dac3fe5a7a56356ca95547fcf7925bec8d9c1dd", "f3107f1a8eb124b55e775d23416540f49204a19e", "304d0588f634e9e72087a706367c53af9c7f7180", "04e70ce964b343e28b3dbd0c46d10ccda958ab8c", "8a9a022baa15befc325f87892c6bdae25b35bc33"]
["test", "feat", "fix", "docs", "refactor"]
ecma 7 ready,verify process can start at supported element types Verifies a PI can be started at specific element types. The test will deploy the process, start an instance at the desired start element and verify that it has been activated successfully.,add ability to get all encoded values,add system get version info Fiddle example (#20536),dedup redundant imports
["diff --git a/config/webpack.config.prod.js b/config/webpack.config.prod.js\nindex f7c6b23..4a00c65 100644\n--- a/config/webpack.config.prod.js\n+++ b/config/webpack.config.prod.js\n@@ -266,7 +266,7 @@ module.exports = {\n : new UglifyJsPlugin({\n uglifyOptions: {\n ie8: false,\n- ecma: 6,\n+ ecma: 7,\n compress: {\n warnings: false,\n // Disabled because of an issue with Uglify breaking seemingly valid code:\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\nnew file mode 100644\nindex 0000000..a505307\n--- /dev/null\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\n@@ -0,0 +1,233 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.engine.processing.processinstance;\n+\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.groups.Tuple.tuple;\n+\n+import io.camunda.zeebe.engine.util.EngineRule;\n+import io.camunda.zeebe.model.bpmn.Bpmn;\n+import io.camunda.zeebe.model.bpmn.BpmnModelInstance;\n+import io.camunda.zeebe.protocol.record.Record;\n+import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n+import io.camunda.zeebe.protocol.record.value.BpmnElementType;\n+import io.camunda.zeebe.test.util.record.RecordingExporter;\n+import io.camunda.zeebe.test.util.record.RecordingExporterTestWatcher;\n+import java.util.Collection;\n+import java.util.Collections;\n+import java.util.List;\n+import java.util.Map;\n+import org.junit.ClassRule;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.runner.RunWith;\n+import org.junit.runners.Parameterized;\n+import org.junit.runners.Parameterized.Parameters;\n+\n+@RunWith(Parameterized.class)\n+public class CreateProcessInstanceSupportedElementTest {\n+\n+ @ClassRule public static final EngineRule ENGINE = EngineRule.singlePartition();\n+ private static final String PROCESS_ID = \"processId\";\n+ private static final String CHILD_PROCESS_ID = \"childProcessId\";\n+ private static final String START_ELEMENT_ID = \"startElement\";\n+ private static final String MESSAGE = \"message\";\n+ private static final String JOBTYPE = \"jobtype\";\n+\n+ @Rule\n+ public final RecordingExporterTestWatcher recordingExporterTestWatcher =\n+ new RecordingExporterTestWatcher();\n+\n+ private final Scenario scenario;\n+\n+ public CreateProcessInstanceSupportedElementTest(final Scenario scenario) {\n+ this.scenario = scenario;\n+ }\n+\n+ @Parameters(name = \"{0}\")\n+ public static Collection<Object> scenarios() {\n+ return List.of(\n+ new Scenario(\n+ BpmnElementType.SUB_PROCESS,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .subProcess(START_ELEMENT_ID)\n+ .embeddedSubProcess()\n+ .startEvent()\n+ .subProcessDone()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_SUB_PROCESS,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .eventSubProcess(\n+ START_ELEMENT_ID, e -> 
e.startEvent().timerWithDuration(\"PT1H\").endEvent())\n+ .startEvent()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_CATCH_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(START_ELEMENT_ID)\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_THROW_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateThrowEvent(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.END_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().endEvent(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SERVICE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.RECEIVE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .receiveTask(START_ELEMENT_ID)\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.USER_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().userTask(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.MANUAL_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .manualTask(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EXCLUSIVE_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .exclusiveGateway(START_ELEMENT_ID)\n+ .defaultFlow()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.PARALLEL_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .parallelGateway(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_BASED_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .eventBasedGateway(START_ELEMENT_ID)\n+ .intermediateCatchEvent()\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .moveToLastGateway()\n+ .intermediateCatchEvent()\n+ .timerWithDuration(\"PT1H\")\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.MULTI_INSTANCE_BODY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(\n+ START_ELEMENT_ID,\n+ t ->\n+ t.zeebeJobType(JOBTYPE)\n+ .multiInstance(m -> m.parallel().zeebeInputCollectionExpression(\"[1]\")))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.CALL_ACTIVITY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .callActivity(START_ELEMENT_ID, c -> c.zeebeProcessId(CHILD_PROCESS_ID))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.BUSINESS_RULE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .businessRuleTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SCRIPT_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .scriptTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new 
Scenario(\n+ BpmnElementType.SEND_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .sendTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()));\n+ }\n+\n+ @Test\n+ public void testProcessInstanceCanStartAtElementType() {\n+ // given\n+ ENGINE.deployment().withXmlResource(scenario.modelInstance).deploy();\n+ if (scenario.type == BpmnElementType.CALL_ACTIVITY) {\n+ ENGINE.deployment().withXmlResource(getChildProcess()).deploy();\n+ }\n+\n+ // when\n+ final long instanceKey =\n+ ENGINE\n+ .processInstance()\n+ .ofBpmnProcessId(PROCESS_ID)\n+ .withStartInstruction(START_ELEMENT_ID)\n+ .withVariables(scenario.variables)\n+ .create();\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.processInstanceRecords()\n+ .withProcessInstanceKey(instanceKey)\n+ .onlyEvents()\n+ .limit(\n+ r ->\n+ r.getValue().getBpmnElementType() == scenario.type\n+ && r.getIntent() == ProcessInstanceIntent.ELEMENT_ACTIVATED))\n+ .extracting(record -> record.getValue().getBpmnElementType(), Record::getIntent)\n+ .containsSequence(\n+ tuple(BpmnElementType.PROCESS, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(BpmnElementType.PROCESS, ProcessInstanceIntent.ELEMENT_ACTIVATED),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATED));\n+ }\n+\n+ private BpmnModelInstance getChildProcess() {\n+ return Bpmn.createExecutableProcess(CHILD_PROCESS_ID).startEvent().endEvent().done();\n+ }\n+\n+ record Scenario(\n+ BpmnElementType type, BpmnModelInstance modelInstance, Map<String, Object> variables) {}\n+}\n", "diff --git a/delorean_mem_qe/src/column.rs b/delorean_mem_qe/src/column.rs\nindex bc89cb2..b3df18e 100644\n--- a/delorean_mem_qe/src/column.rs\n+++ b/delorean_mem_qe/src/column.rs\n@@ -537,6 +537,22 @@ impl Column {\n }\n }\n \n+ /// Materialise all of the encoded values.\n+ pub fn all_encoded_values(&self) -> Vector {\n+ match self {\n+ Column::String(c) => {\n+ let now = std::time::Instant::now();\n+ let v = c.all_encoded_values();\n+ log::debug!(\"time getting all encoded values {:?}\", now.elapsed());\n+\n+ log::debug!(\"dictionary {:?}\", c.data.dictionary());\n+ Vector::Integer(v)\n+ }\n+ Column::Float(c) => Vector::Float(c.all_encoded_values()),\n+ Column::Integer(c) => Vector::Integer(c.all_encoded_values()),\n+ }\n+ }\n+\n /// Given an encoded value for a row, materialise and return the decoded\n /// version.\n ///\n@@ -986,6 +1002,10 @@ impl String {\n self.data.encoded_values(row_ids)\n }\n \n+ pub fn all_encoded_values(&self) -> Vec<i64> {\n+ self.data.all_encoded_values()\n+ }\n+\n /// Return the decoded value for an encoded ID.\n ///\n /// Panics if there is no decoded value for the provided id\n@@ -1037,6 +1057,10 @@ impl Float {\n self.data.encoded_values(row_ids)\n }\n \n+ pub fn all_encoded_values(&self) -> Vec<f64> {\n+ self.data.all_encoded_values()\n+ }\n+\n pub fn scan_from(&self, row_id: usize) -> &[f64] {\n self.data.scan_from(row_id)\n }\n@@ -1106,6 +1130,10 @@ impl Integer {\n self.data.encoded_values(row_ids)\n }\n \n+ pub fn all_encoded_values(&self) -> Vec<i64> {\n+ self.data.all_encoded_values()\n+ }\n+\n pub fn scan_from(&self, row_id: usize) -> &[i64] {\n self.data.scan_from(row_id)\n }\ndiff --git a/delorean_mem_qe/src/encoding.rs b/delorean_mem_qe/src/encoding.rs\nindex d6a865a..4b057cf 100644\n--- a/delorean_mem_qe/src/encoding.rs\n+++ b/delorean_mem_qe/src/encoding.rs\n@@ -68,6 +68,12 @@ where\n self.values(row_ids)\n }\n \n+ /// Return all encoded 
values. For this encoding this is just the decoded\n+ /// values\n+ pub fn all_encoded_values(&self) -> Vec<T> {\n+ self.values.clone()\n+ }\n+\n // TODO(edd): fix this when added NULL support\n pub fn scan_from_until_some(&self, _row_id: usize) -> Option<T> {\n unreachable!(\"to remove\");\n@@ -485,6 +491,26 @@ impl DictionaryRLE {\n out\n }\n \n+ // values materialises a vector of references to all logical values in the\n+ // encoding.\n+ pub fn all_values(&mut self) -> Vec<Option<&String>> {\n+ let mut out: Vec<Option<&String>> = Vec::with_capacity(self.total as usize);\n+\n+ // build reverse mapping.\n+ let mut idx_value = BTreeMap::new();\n+ for (k, v) in &self.entry_index {\n+ idx_value.insert(v, k);\n+ }\n+ assert_eq!(idx_value.len(), self.entry_index.len());\n+\n+ for (idx, rl) in &self.run_lengths {\n+ // TODO(edd): fix unwrap - we know that the value exists in map...\n+ let v = idx_value.get(&idx).unwrap().as_ref();\n+ out.extend(iter::repeat(v).take(*rl as usize));\n+ }\n+ out\n+ }\n+\n /// Return the decoded value for an encoded ID.\n ///\n /// Panics if there is no decoded value for the provided id\n@@ -528,22 +554,13 @@ impl DictionaryRLE {\n out\n }\n \n- // values materialises a vector of references to all logical values in the\n- // encoding.\n- pub fn all_values(&mut self) -> Vec<Option<&String>> {\n- let mut out: Vec<Option<&String>> = Vec::with_capacity(self.total as usize);\n-\n- // build reverse mapping.\n- let mut idx_value = BTreeMap::new();\n- for (k, v) in &self.entry_index {\n- idx_value.insert(v, k);\n- }\n- assert_eq!(idx_value.len(), self.entry_index.len());\n+ // all_encoded_values materialises a vector of all encoded values for the\n+ // column.\n+ pub fn all_encoded_values(&self) -> Vec<i64> {\n+ let mut out: Vec<i64> = Vec::with_capacity(self.total as usize);\n \n for (idx, rl) in &self.run_lengths {\n- // TODO(edd): fix unwrap - we know that the value exists in map...\n- let v = idx_value.get(&idx).unwrap().as_ref();\n- out.extend(iter::repeat(v).take(*rl as usize));\n+ out.extend(iter::repeat(*idx as i64).take(*rl as usize));\n }\n out\n }\ndiff --git a/delorean_mem_qe/src/segment.rs b/delorean_mem_qe/src/segment.rs\nindex c058df0..f8c5005 100644\n--- a/delorean_mem_qe/src/segment.rs\n+++ b/delorean_mem_qe/src/segment.rs\n@@ -228,7 +228,7 @@ impl Segment {\n group_columns: &[String],\n aggregates: &[(String, AggregateType)],\n window: i64,\n- ) -> BTreeMap<Vec<String>, Vec<(String, Option<column::Aggregate>)>> {\n+ ) -> BTreeMap<Vec<i64>, Vec<(&String, &AggregateType, Option<column::Aggregate>)>> {\n // Build a hash table - essentially, scan columns for matching row ids,\n // emitting the encoded value for each column and track those value\n // combinations in a hashmap with running aggregates.\n@@ -242,6 +242,10 @@ impl Segment {\n assert_ne!(group_columns[group_columns.len() - 1], \"time\");\n }\n \n+ // TODO(edd): Perf - if there is no predicate and we want entire segment\n+ // then it will be a lot faster to not build filtered_row_ids and just\n+ // get all encoded values for each grouping column...\n+\n // filter on predicates and time\n let filtered_row_ids: croaring::Bitmap;\n if let Some(row_ids) = self.filter_by_predicates_eq(time_range, predicates) {\n@@ -263,7 +267,12 @@ impl Segment {\n let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());\n for group_column in group_columns {\n if let Some(column) = self.column(&group_column) {\n- let encoded_values = column.encoded_values(&filtered_row_ids_vec);\n+ let 
encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {\n+ column.all_encoded_values()\n+ } else {\n+ column.encoded_values(&filtered_row_ids_vec)\n+ };\n+\n assert_eq!(\n filtered_row_ids.cardinality() as usize,\n encoded_values.len()\n@@ -325,10 +334,10 @@ impl Segment {\n .collect::<Vec<_>>();\n \n // hashMap is about 20% faster than BTreeMap in this case\n- let mut hash_table: HashMap<\n+ let mut hash_table: BTreeMap<\n Vec<i64>,\n Vec<(&String, &AggregateType, Option<column::Aggregate>)>,\n- > = HashMap::new();\n+ > = BTreeMap::new();\n \n let mut aggregate_row: Vec<(&str, Option<column::Scalar>)> =\n std::iter::repeat_with(|| (\"\", None))\n@@ -406,8 +415,10 @@ impl Segment {\n }\n processed_rows += 1;\n }\n+ // println!(\"groups: {:?}\", hash_table.len());\n log::debug!(\"({:?} rows processed) {:?}\", processed_rows, hash_table);\n BTreeMap::new()\n+ // hash_table\n }\n \n pub fn aggregate_by_group_using_sort(\n@@ -451,7 +462,11 @@ impl Segment {\n let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());\n for group_column in group_columns {\n if let Some(column) = self.column(&group_column) {\n- let encoded_values = column.encoded_values(&filtered_row_ids_vec);\n+ let encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {\n+ column.all_encoded_values()\n+ } else {\n+ column.encoded_values(&filtered_row_ids_vec)\n+ };\n assert_eq!(\n filtered_row_ids.cardinality() as usize,\n encoded_values.len()\n@@ -557,6 +572,10 @@ impl Segment {\n assert_ne!(group_columns[group_columns.len() - 1], \"time\");\n }\n \n+ // TODO(edd): Perf - if there is no predicate and we want entire segment\n+ // then it will be a lot faster to not build filtered_row_ids and just\n+ // get all encoded values for each grouping column...\n+\n // filter on predicates and time\n let filtered_row_ids: croaring::Bitmap;\n if let Some(row_ids) = self.filter_by_predicates_eq(time_range, predicates) {\n@@ -577,7 +596,11 @@ impl Segment {\n let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());\n for group_column in group_columns {\n if let Some(column) = self.column(&group_column) {\n- let encoded_values = column.encoded_values(&filtered_row_ids_vec);\n+ let encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {\n+ column.all_encoded_values()\n+ } else {\n+ column.encoded_values(&filtered_row_ids_vec)\n+ };\n assert_eq!(\n filtered_row_ids.cardinality() as usize,\n encoded_values.len()\n@@ -709,6 +732,7 @@ impl Segment {\n aggregates: group_key_aggregates,\n });\n \n+ // println!(\"groups: {:?}\", results.len());\n log::debug!(\"({:?} rows processed) {:?}\", processed_rows, results);\n // results\n vec![]\n", "diff --git a/docs/fiddles/system/system-information/get-version-information/index.html b/docs/fiddles/system/system-information/get-version-information/index.html\nnew file mode 100644\nindex 0000000..0867bc3\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/index.html\n@@ -0,0 +1,26 @@\n+<!DOCTYPE html>\n+<html>\n+ <head>\n+ <meta charset=\"UTF-8\">\n+ </head>\n+ <body>\n+ <div>\n+ <div>\n+ <h1>Get version information</h1>\n+ <i>Supports: Win, macOS, Linux <span>|</span> Process: Both</i>\n+ <div>\n+ <div>\n+ <button id=\"version-info\">View Demo</button>\n+ <span id=\"got-version-info\"></span>\n+ </div>\n+ <p>The <code>process</code> module is built into Node.js (therefore you can use this in both the main and renderer processes) and in Electron apps this object has a few more useful properties on 
it.</p>\n+ <p>The example below gets the version of Electron in use by the app.</p>\n+ <p>See the <a href=\"http://electron.atom.io/docs/api/process\">process documentation <span>(opens in new window)</span></a> for more.</p>\n+ </div>\n+ </div>\n+ </div>\n+ </body>\n+ <script>\n+ require('./renderer.js')\n+ </script>\n+</html>\ndiff --git a/docs/fiddles/system/system-information/get-version-information/main.js b/docs/fiddles/system/system-information/get-version-information/main.js\nnew file mode 100644\nindex 0000000..1f9f917\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/main.js\n@@ -0,0 +1,25 @@\n+const { app, BrowserWindow } = require('electron')\n+\n+let mainWindow = null\n+\n+function createWindow () {\n+ const windowOptions = {\n+ width: 600,\n+ height: 400,\n+ title: 'Get version information',\n+ webPreferences: {\n+ nodeIntegration: true\n+ }\n+ }\n+\n+ mainWindow = new BrowserWindow(windowOptions)\n+ mainWindow.loadFile('index.html')\n+\n+ mainWindow.on('closed', () => {\n+ mainWindow = null\n+ })\n+}\n+\n+app.on('ready', () => {\n+ createWindow()\n+})\ndiff --git a/docs/fiddles/system/system-information/get-version-information/renderer.js b/docs/fiddles/system/system-information/get-version-information/renderer.js\nnew file mode 100644\nindex 0000000..40f7f2c\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/renderer.js\n@@ -0,0 +1,8 @@\n+const versionInfoBtn = document.getElementById('version-info')\n+\n+const electronVersion = process.versions.electron\n+\n+versionInfoBtn.addEventListener('click', () => {\n+ const message = `This app is using Electron version: ${electronVersion}`\n+ document.getElementById('got-version-info').innerHTML = message\n+})\n", "diff --git a/ibis/backends/base/__init__.py b/ibis/backends/base/__init__.py\nindex effd44c..a59c0ec 100644\n--- a/ibis/backends/base/__init__.py\n+++ b/ibis/backends/base/__init__.py\n@@ -31,7 +31,7 @@ import ibis.common.exceptions as exc\n import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n \n __all__ = ('BaseBackend', 'Database', 'connect')\n \ndiff --git a/ibis/backends/base/sql/__init__.py b/ibis/backends/base/sql/__init__.py\nindex e4f2129..7bbdaf9 100644\n--- a/ibis/backends/base/sql/__init__.py\n+++ b/ibis/backends/base/sql/__init__.py\n@@ -12,7 +12,7 @@ import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import BaseBackend\n from ibis.backends.base.sql.compiler import Compiler\n \ndiff --git a/ibis/backends/base/sql/alchemy/__init__.py b/ibis/backends/base/sql/alchemy/__init__.py\nindex 71cc0e8..ab89d7d 100644\n--- a/ibis/backends/base/sql/alchemy/__init__.py\n+++ b/ibis/backends/base/sql/alchemy/__init__.py\n@@ -11,7 +11,7 @@ import ibis\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.alchemy.database import AlchemyDatabase, AlchemyTable\n from ibis.backends.base.sql.alchemy.datatypes import (\ndiff --git a/ibis/backends/base/sql/alchemy/query_builder.py b/ibis/backends/base/sql/alchemy/query_builder.py\nindex 54c74ba..0ec432f 100644\n--- a/ibis/backends/base/sql/alchemy/query_builder.py\n+++ 
b/ibis/backends/base/sql/alchemy/query_builder.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import functools\n \n import sqlalchemy as sa\n-import sqlalchemy.sql as sql\n+from sqlalchemy import sql\n \n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/base/sql/compiler/base.py b/ibis/backends/base/sql/compiler/base.py\nindex 84102aa..fb44667 100644\n--- a/ibis/backends/base/sql/compiler/base.py\n+++ b/ibis/backends/base/sql/compiler/base.py\n@@ -7,7 +7,7 @@ import toolz\n \n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n class DML(abc.ABC):\ndiff --git a/ibis/backends/base/sql/compiler/query_builder.py b/ibis/backends/base/sql/compiler/query_builder.py\nindex a2d5214..95f5e8d 100644\n--- a/ibis/backends/base/sql/compiler/query_builder.py\n+++ b/ibis/backends/base/sql/compiler/query_builder.py\n@@ -8,7 +8,7 @@ import toolz\n import ibis.common.exceptions as com\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.compiler.base import DML, QueryAST, SetOp\n from ibis.backends.base.sql.compiler.select_builder import SelectBuilder, _LimitSpec\n from ibis.backends.base.sql.compiler.translator import ExprTranslator, QueryContext\ndiff --git a/ibis/backends/base/sql/registry/main.py b/ibis/backends/base/sql/registry/main.py\nindex 77f70a5..586ace5 100644\n--- a/ibis/backends/base/sql/registry/main.py\n+++ b/ibis/backends/base/sql/registry/main.py\n@@ -4,7 +4,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.registry import (\n aggregate,\n binary_infix,\ndiff --git a/ibis/backends/base/sql/registry/timestamp.py b/ibis/backends/base/sql/registry/timestamp.py\nindex 412eab1..3c8571f 100644\n--- a/ibis/backends/base/sql/registry/timestamp.py\n+++ b/ibis/backends/base/sql/registry/timestamp.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n def extract_field(sql_attr):\ndiff --git a/ibis/backends/clickhouse/tests/test_client.py b/ibis/backends/clickhouse/tests/test_client.py\nindex 8db6672..bb1b9ba 100644\n--- a/ibis/backends/clickhouse/tests/test_client.py\n+++ b/ibis/backends/clickhouse/tests/test_client.py\n@@ -3,9 +3,9 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.backends.clickhouse.tests.conftest import (\n CLICKHOUSE_HOST,\n CLICKHOUSE_PASS,\ndiff --git a/ibis/backends/conftest.py b/ibis/backends/conftest.py\nindex 3a974da..ba7ad75 100644\n--- a/ibis/backends/conftest.py\n+++ b/ibis/backends/conftest.py\n@@ -20,7 +20,7 @@ if TYPE_CHECKING:\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import _get_backend_names\n \n TEST_TABLES = {\ndiff --git a/ibis/backends/dask/execution/util.py b/ibis/backends/dask/execution/util.py\nindex 61bff7e..7ed0c10 100644\n--- a/ibis/backends/dask/execution/util.py\n+++ b/ibis/backends/dask/execution/util.py\n@@ -9,13 +9,13 @@ import pandas as pd\n from dask.dataframe.groupby 
import SeriesGroupBy\n \n import ibis.backends.pandas.execution.util as pd_util\n-import ibis.common.graph as graph\n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n import ibis.util\n from ibis.backends.dask.core import execute\n from ibis.backends.pandas.trace import TraceTwoLevelDispatcher\n+from ibis.common import graph\n from ibis.expr.scope import Scope\n \n if TYPE_CHECKING:\ndiff --git a/ibis/backends/duckdb/datatypes.py b/ibis/backends/duckdb/datatypes.py\nindex fd6b8f5..52c0719 100644\n--- a/ibis/backends/duckdb/datatypes.py\n+++ b/ibis/backends/duckdb/datatypes.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import parsy as p\n import toolz\n \n-import ibis.util as util\n+from ibis import util\n from ibis.common.parsing import (\n COMMA,\n FIELD,\ndiff --git a/ibis/backends/impala/__init__.py b/ibis/backends/impala/__init__.py\nindex 4ad2057..8299a28 100644\n--- a/ibis/backends/impala/__init__.py\n+++ b/ibis/backends/impala/__init__.py\n@@ -20,7 +20,7 @@ import ibis.config\n import ibis.expr.datatypes as dt\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.ddl import (\n CTAS,\ndiff --git a/ibis/backends/impala/client.py b/ibis/backends/impala/client.py\nindex 6655ce7..78d526f 100644\n--- a/ibis/backends/impala/client.py\n+++ b/ibis/backends/impala/client.py\n@@ -10,7 +10,7 @@ import sqlalchemy as sa\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import Database\n from ibis.backends.base.sql.compiler import DDL, DML\n from ibis.backends.base.sql.ddl import (\ndiff --git a/ibis/backends/impala/pandas_interop.py b/ibis/backends/impala/pandas_interop.py\nindex f410a8b..e687884 100644\n--- a/ibis/backends/impala/pandas_interop.py\n+++ b/ibis/backends/impala/pandas_interop.py\n@@ -22,7 +22,7 @@ from posixpath import join as pjoin\n import ibis.backends.pandas.client # noqa: F401\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.config import options\n \n \ndiff --git a/ibis/backends/impala/tests/conftest.py b/ibis/backends/impala/tests/conftest.py\nindex 1075ebe..a815be5 100644\n--- a/ibis/backends/impala/tests/conftest.py\n+++ b/ibis/backends/impala/tests/conftest.py\n@@ -13,8 +13,7 @@ import pytest\n \n import ibis\n import ibis.expr.types as ir\n-import ibis.util as util\n-from ibis import options\n+from ibis import options, util\n from ibis.backends.base import BaseBackend\n from ibis.backends.conftest import TEST_TABLES, _random_identifier\n from ibis.backends.impala.compiler import ImpalaCompiler, ImpalaExprTranslator\ndiff --git a/ibis/backends/impala/tests/test_client.py b/ibis/backends/impala/tests/test_client.py\nindex 0b56054..3fcca3a 100644\n--- a/ibis/backends/impala/tests/test_client.py\n+++ b/ibis/backends/impala/tests/test_client.py\n@@ -7,9 +7,9 @@ import pytz\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_ddl.py b/ibis/backends/impala/tests/test_ddl.py\nindex 870c4dc..2346a3d 100644\n--- 
a/ibis/backends/impala/tests/test_ddl.py\n+++ b/ibis/backends/impala/tests/test_ddl.py\n@@ -6,7 +6,7 @@ import ibis\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.ddl import fully_qualified_re\n from ibis.tests.util import assert_equal\n \ndiff --git a/ibis/backends/impala/tests/test_exprs.py b/ibis/backends/impala/tests/test_exprs.py\nindex cfc8552..1d6f44f 100644\n--- a/ibis/backends/impala/tests/test_exprs.py\n+++ b/ibis/backends/impala/tests/test_exprs.py\n@@ -5,10 +5,10 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.types as ir\n from ibis import literal as L\n from ibis.backends.impala.compiler import ImpalaCompiler\n+from ibis.expr import api\n from ibis.expr.datatypes import Category\n \n \ndiff --git a/ibis/backends/impala/tests/test_partition.py b/ibis/backends/impala/tests/test_partition.py\nindex 1f96e7d..44217a4 100644\n--- a/ibis/backends/impala/tests/test_partition.py\n+++ b/ibis/backends/impala/tests/test_partition.py\n@@ -6,7 +6,7 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_udf.py b/ibis/backends/impala/tests/test_udf.py\nindex 895918b..fd950d5 100644\n--- a/ibis/backends/impala/tests/test_udf.py\n+++ b/ibis/backends/impala/tests/test_udf.py\n@@ -9,11 +9,11 @@ import ibis\n import ibis.backends.impala as api\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n-import ibis.expr.rules as rules\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.impala import ddl\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import rules\n \n pytest.importorskip(\"impala\")\n \ndiff --git a/ibis/backends/impala/udf.py b/ibis/backends/impala/udf.py\nindex c6f2ef6..8b8b552 100644\n--- a/ibis/backends/impala/udf.py\n+++ b/ibis/backends/impala/udf.py\n@@ -21,7 +21,7 @@ import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.udf.validate as v\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.registry import fixed_arity, sql_type_names\n from ibis.backends.impala.compiler import ImpalaExprTranslator\n \ndiff --git a/ibis/backends/mysql/__init__.py b/ibis/backends/mysql/__init__.py\nindex c0ddacb..50b331a 100644\n--- a/ibis/backends/mysql/__init__.py\n+++ b/ibis/backends/mysql/__init__.py\n@@ -8,7 +8,7 @@ import warnings\n from typing import Literal\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/mysql/compiler.py b/ibis/backends/mysql/compiler.py\nindex 13819cb..7456f71 100644\n--- a/ibis/backends/mysql/compiler.py\n+++ b/ibis/backends/mysql/compiler.py\n@@ -1,7 +1,7 @@\n from __future__ import annotations\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n from ibis.backends.base.sql.alchemy import AlchemyCompiler, AlchemyExprTranslator\ndiff --git a/ibis/backends/postgres/tests/test_functions.py b/ibis/backends/postgres/tests/test_functions.py\nindex 
33c6d2e..0f377e3 100644\n--- a/ibis/backends/postgres/tests/test_functions.py\n+++ b/ibis/backends/postgres/tests/test_functions.py\n@@ -11,9 +11,9 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis import literal as L\n from ibis.expr.window import rows_with_max_lookback\n \ndiff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py\nindex 1b42080..b994911 100644\n--- a/ibis/backends/pyspark/__init__.py\n+++ b/ibis/backends/pyspark/__init__.py\n@@ -14,8 +14,7 @@ import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.expr.types as types\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.compiler import Compiler, TableSetFormatter\n from ibis.backends.base.sql.ddl import (\n@@ -217,16 +216,16 @@ class Backend(BaseSQLBackend):\n **kwargs: Any,\n ) -> Any:\n \"\"\"Execute an expression.\"\"\"\n- if isinstance(expr, types.Table):\n+ if isinstance(expr, ir.Table):\n return self.compile(expr, timecontext, params, **kwargs).toPandas()\n- elif isinstance(expr, types.Column):\n+ elif isinstance(expr, ir.Column):\n # expression must be named for the projection\n if not expr.has_name():\n expr = expr.name(\"tmp\")\n return self.compile(\n expr.to_projection(), timecontext, params, **kwargs\n ).toPandas()[expr.get_name()]\n- elif isinstance(expr, types.Scalar):\n+ elif isinstance(expr, ir.Scalar):\n compiled = self.compile(expr, timecontext, params, **kwargs)\n if isinstance(compiled, Column):\n # attach result column to a fake DataFrame and\ndiff --git a/ibis/backends/pyspark/tests/test_ddl.py b/ibis/backends/pyspark/tests/test_ddl.py\nindex 0288062..ccc8a97 100644\n--- a/ibis/backends/pyspark/tests/test_ddl.py\n+++ b/ibis/backends/pyspark/tests/test_ddl.py\n@@ -5,7 +5,7 @@ import pytest\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pyspark = pytest.importorskip(\"pyspark\")\ndiff --git a/ibis/backends/sqlite/tests/test_client.py b/ibis/backends/sqlite/tests/test_client.py\nindex 95aa24d..ad64700 100644\n--- a/ibis/backends/sqlite/tests/test_client.py\n+++ b/ibis/backends/sqlite/tests/test_client.py\n@@ -5,8 +5,8 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.types as ir\n+from ibis import config\n \n pytest.importorskip(\"sqlalchemy\")\n \ndiff --git a/ibis/expr/format.py b/ibis/expr/format.py\nindex e3d48cd..85fab3f 100644\n--- a/ibis/expr/format.py\n+++ b/ibis/expr/format.py\n@@ -9,13 +9,13 @@ from typing import Any, Callable, Deque, Iterable, Mapping, Tuple\n import rich.pretty\n \n import ibis\n-import ibis.common.graph as graph\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n import ibis.expr.window as win\n-import ibis.util as util\n+from ibis import util\n+from ibis.common import graph\n \n Aliases = Mapping[ops.TableNode, int]\n Deps = Deque[Tuple[int, ops.TableNode]]\ndiff --git a/ibis/expr/operations/relations.py b/ibis/expr/operations/relations.py\nindex 080ddcd..de44a15 100644\n--- a/ibis/expr/operations/relations.py\n+++ b/ibis/expr/operations/relations.py\n@@ -11,7 +11,7 @@ import ibis.expr.operations 
as ops\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute\n from ibis.expr.deferred import Deferred\n from ibis.expr.operations.core import Named, Node, Value\ndiff --git a/ibis/expr/rules.py b/ibis/expr/rules.py\nindex 9b1a3b7..d40700e 100644\n--- a/ibis/expr/rules.py\n+++ b/ibis/expr/rules.py\n@@ -11,7 +11,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute, optional\n from ibis.common.validators import (\n bool_,\ndiff --git a/ibis/expr/timecontext.py b/ibis/expr/timecontext.py\nindex 7ecd8e7..9620d6c 100644\n--- a/ibis/expr/timecontext.py\n+++ b/ibis/expr/timecontext.py\n@@ -38,8 +38,8 @@ from typing import TYPE_CHECKING, Any\n import numpy as np\n \n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.operations as ops\n+from ibis import config\n \n if TYPE_CHECKING:\n import pandas as pd\ndiff --git a/ibis/expr/types/groupby.py b/ibis/expr/types/groupby.py\nindex 138f92e..97aaaa2 100644\n--- a/ibis/expr/types/groupby.py\n+++ b/ibis/expr/types/groupby.py\n@@ -22,7 +22,7 @@ from typing import Iterable, Sequence\n import ibis.expr.analysis as an\n import ibis.expr.types as ir\n import ibis.expr.window as _window\n-import ibis.util as util\n+from ibis import util\n from ibis.expr.deferred import Deferred\n \n _function_types = tuple(\ndiff --git a/ibis/expr/window.py b/ibis/expr/window.py\nindex 5ef3bb1..3e0efdc 100644\n--- a/ibis/expr/window.py\n+++ b/ibis/expr/window.py\n@@ -11,7 +11,7 @@ import toolz\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.exceptions import IbisInputError\n from ibis.common.grounds import Comparable\n \ndiff --git a/ibis/tests/expr/test_decimal.py b/ibis/tests/expr/test_decimal.py\nindex 85d8eb2..12b809b 100644\n--- a/ibis/tests/expr/test_decimal.py\n+++ b/ibis/tests/expr/test_decimal.py\n@@ -3,10 +3,10 @@ import operator\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_type_metadata(lineitem):\ndiff --git a/ibis/tests/expr/test_interactive.py b/ibis/tests/expr/test_interactive.py\nindex cea1945..0c5613b 100644\n--- a/ibis/tests/expr/test_interactive.py\n+++ b/ibis/tests/expr/test_interactive.py\n@@ -14,7 +14,7 @@\n \n import pytest\n \n-import ibis.config as config\n+from ibis import config\n from ibis.tests.expr.mocks import MockBackend\n \n \ndiff --git a/ibis/tests/expr/test_table.py b/ibis/tests/expr/test_table.py\nindex 04f4a7d..3f77985 100644\n--- a/ibis/tests/expr/test_table.py\n+++ b/ibis/tests/expr/test_table.py\n@@ -10,13 +10,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as an\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n from ibis import _\n from ibis import literal as L\n from ibis.common.exceptions import RelationError\n+from ibis.expr import api\n from ibis.expr.types import Column, Table\n from ibis.tests.expr.mocks import MockAlchemyBackend, 
MockBackend\n from ibis.tests.util import assert_equal, assert_pickle_roundtrip\ndiff --git a/ibis/tests/expr/test_temporal.py b/ibis/tests/expr/test_temporal.py\nindex e76e71c..9a0f43f 100644\n--- a/ibis/tests/expr/test_temporal.py\n+++ b/ibis/tests/expr/test_temporal.py\n@@ -5,10 +5,10 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_temporal_literals():\ndiff --git a/ibis/tests/expr/test_timestamp.py b/ibis/tests/expr/test_timestamp.py\nindex 6601c8b..7782787 100644\n--- a/ibis/tests/expr/test_timestamp.py\n+++ b/ibis/tests/expr/test_timestamp.py\n@@ -5,11 +5,11 @@ import pandas as pd\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_field_select(alltypes):\ndiff --git a/ibis/tests/expr/test_value_exprs.py b/ibis/tests/expr/test_value_exprs.py\nindex 4c3d475..9eb247c 100644\n--- a/ibis/tests/expr/test_value_exprs.py\n+++ b/ibis/tests/expr/test_value_exprs.py\n@@ -15,13 +15,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as L\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n from ibis import _, literal\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import api\n from ibis.tests.util import assert_equal\n \n \ndiff --git a/ibis/tests/expr/test_visualize.py b/ibis/tests/expr/test_visualize.py\nindex 5525944..253564f 100644\n--- a/ibis/tests/expr/test_visualize.py\n+++ b/ibis/tests/expr/test_visualize.py\n@@ -9,8 +9,8 @@ import ibis.expr.types as ir\n \n pytest.importorskip('graphviz')\n \n-import ibis.expr.api as api # noqa: E402\n import ibis.expr.visualize as viz # noqa: E402\n+from ibis.expr import api # noqa: E402\n \n pytestmark = pytest.mark.skipif(\n int(os.environ.get('CONDA_BUILD', 0)) == 1, reason='CONDA_BUILD defined'\ndiff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 2ad5453..3aa8c3d 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -15,8 +15,8 @@\n import operator\n \n import pytest\n-import sqlalchemy.sql as sql\n from sqlalchemy import func as F\n+from sqlalchemy import sql\n from sqlalchemy import types as sat\n \n import ibis\ndiff --git a/ibis/tests/util.py b/ibis/tests/util.py\nindex f79d09a..025bfc7 100644\n--- a/ibis/tests/util.py\n+++ b/ibis/tests/util.py\n@@ -5,7 +5,7 @@ from __future__ import annotations\n import pickle\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n \n \n def assert_equal(left, right):\ndiff --git a/pyproject.toml b/pyproject.toml\nindex f2146d4..492ad9e 100644\n--- a/pyproject.toml\n+++ b/pyproject.toml\n@@ -310,6 +310,7 @@ select = [\n \"PGH\", # pygrep-hooks\n \"PLC\", # pylint\n \"PLE\", # pylint\n+ \"PLR\", # pylint import style\n \"PLW\", # pylint\n \"RET\", # flake8-return\n \"RUF\", # ruff-specific rules\n"]
5
["6aa63c9b8d4dcdbb401743adc3c9a1020d943250", "a5ecfdf49b0d4c43fbbbf7947be7c0327ccb3415", "cad5e45208346528ad02cd04dcac863f90faa037", "16d4ace80096557fb3fd48396aa09107241c3131", "8d53d724275ebe4b2a0bb0bd7e2c2dfc399e049b"]
["build", "test", "feat", "docs", "refactor"]
remove writers from interface,backup manager can mark in-progress backups as failed,use new freespace config for disk space recovery test,permission check,document the use of export buckets for large pre-aggregations Co-authored-by: Ray Paik <[email protected]> Co-authored-by: Artyom Keydunov <[email protected]> Co-authored-by: Dmitry Patsura <[email protected]>
["diff --git a/engine/src/main/java/io/camunda/zeebe/engine/Engine.java b/engine/src/main/java/io/camunda/zeebe/engine/Engine.java\nindex 91f1b41..eb4b9a8 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/Engine.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/Engine.java\n@@ -81,8 +81,6 @@ public class Engine implements RecordProcessor<EngineContext> {\n \n engineContext.setLifecycleListeners(typedRecordProcessors.getLifecycleListeners());\n recordProcessorMap = typedRecordProcessors.getRecordProcessorMap();\n-\n- engineContext.setWriters(writers);\n }\n \n @Override\ndiff --git a/engine/src/main/java/io/camunda/zeebe/engine/EngineContext.java b/engine/src/main/java/io/camunda/zeebe/engine/EngineContext.java\nindex a8e5538..a27b6e6 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/EngineContext.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/EngineContext.java\n@@ -15,7 +15,6 @@ import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessorListene\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecordProcessorFactory;\n import io.camunda.zeebe.engine.processing.streamprocessor.writers.LegacyTypedResponseWriter;\n import io.camunda.zeebe.engine.processing.streamprocessor.writers.LegacyTypedStreamWriter;\n-import io.camunda.zeebe.engine.processing.streamprocessor.writers.Writers;\n import io.camunda.zeebe.engine.state.EventApplier;\n import io.camunda.zeebe.engine.state.mutable.MutableZeebeState;\n import java.util.Collections;\n@@ -34,7 +33,6 @@ public final class EngineContext {\n private final TypedRecordProcessorFactory typedRecordProcessorFactory;\n private List<StreamProcessorLifecycleAware> lifecycleListeners = Collections.EMPTY_LIST;\n private StreamProcessorListener streamProcessorListener;\n- private Writers writers;\n \n public EngineContext(\n final int partitionId,\n@@ -102,12 +100,4 @@ public final class EngineContext {\n public void setStreamProcessorListener(final StreamProcessorListener streamProcessorListener) {\n this.streamProcessorListener = streamProcessorListener;\n }\n-\n- public Writers getWriters() {\n- return writers;\n- }\n-\n- public void setWriters(final Writers writers) {\n- this.writers = writers;\n- }\n }\ndiff --git a/engine/src/main/java/io/camunda/zeebe/engine/api/ReadonlyStreamProcessorContext.java b/engine/src/main/java/io/camunda/zeebe/engine/api/ReadonlyStreamProcessorContext.java\nindex f30c7cc..834b421 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/api/ReadonlyStreamProcessorContext.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/api/ReadonlyStreamProcessorContext.java\n@@ -8,7 +8,6 @@\n package io.camunda.zeebe.engine.api;\n \n import io.camunda.zeebe.engine.processing.streamprocessor.writers.LegacyTypedStreamWriter;\n-import io.camunda.zeebe.engine.processing.streamprocessor.writers.Writers;\n import io.camunda.zeebe.engine.state.mutable.MutableZeebeState;\n import io.camunda.zeebe.logstreams.log.LogStream;\n \n@@ -27,11 +26,6 @@ public interface ReadonlyStreamProcessorContext {\n LegacyTypedStreamWriter getLogStreamWriter();\n \n /**\n- * @return the specific writers, like command, response, etc\n- */\n- Writers getWriters();\n-\n- /**\n * @return the state, where the data is stored during processing\n */\n MutableZeebeState getZeebeState();\ndiff --git a/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessor.java b/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessor.java\nindex 844e487..49fd8e2 100755\n--- 
a/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessor.java\n+++ b/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessor.java\n@@ -346,7 +346,6 @@ public class StreamProcessor extends Actor implements HealthMonitorable, LogReco\n if (listener != null) {\n streamProcessorContext.listener(engineContext.getStreamProcessorListener());\n }\n- streamProcessorContext.writers(engineContext.getWriters());\n }\n \n private long recoverFromSnapshot() {\ndiff --git a/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessorContext.java b/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessorContext.java\nindex d02b273..b527d3c 100644\n--- a/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessorContext.java\n+++ b/engine/src/main/java/io/camunda/zeebe/streamprocessor/StreamProcessorContext.java\n@@ -17,7 +17,6 @@ import io.camunda.zeebe.engine.processing.streamprocessor.writers.CommandRespons\n import io.camunda.zeebe.engine.processing.streamprocessor.writers.LegacyTypedResponseWriterImpl;\n import io.camunda.zeebe.engine.processing.streamprocessor.writers.LegacyTypedStreamWriter;\n import io.camunda.zeebe.engine.processing.streamprocessor.writers.NoopLegacyTypedStreamWriter;\n-import io.camunda.zeebe.engine.processing.streamprocessor.writers.Writers;\n import io.camunda.zeebe.engine.state.EventApplier;\n import io.camunda.zeebe.engine.state.KeyGeneratorControls;\n import io.camunda.zeebe.engine.state.ZeebeDbState;\n@@ -55,7 +54,7 @@ public final class StreamProcessorContext implements ReadonlyStreamProcessorCont\n private StreamProcessorMode streamProcessorMode = StreamProcessorMode.PROCESSING;\n private ProcessingScheduleService processingScheduleService;\n private MutableLastProcessedPositionState lastProcessedPositionState;\n- private Writers writers;\n+\n private LogStreamBatchWriter logStreamBatchWriter;\n private CommandResponseWriter commandResponseWriter;\n \n@@ -85,11 +84,6 @@ public final class StreamProcessorContext implements ReadonlyStreamProcessorCont\n }\n \n @Override\n- public Writers getWriters() {\n- return writers;\n- }\n-\n- @Override\n public MutableZeebeState getZeebeState() {\n return zeebeState;\n }\n@@ -216,10 +210,6 @@ public final class StreamProcessorContext implements ReadonlyStreamProcessorCont\n return streamProcessorMode;\n }\n \n- public void writers(final Writers writers) {\n- this.writers = writers;\n- }\n-\n public void logStreamBatchWriter(final LogStreamBatchWriter batchWriter) {\n logStreamBatchWriter = batchWriter;\n }\n", "diff --git a/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java b/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java\nindex b2dfb98..21eaf6d 100644\n--- a/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java\n@@ -42,4 +42,6 @@ public interface BackupManager {\n \n /** Close Backup manager */\n ActorFuture<Void> closeAsync();\n+\n+ void failInProgressBackup(long lastCheckpointId);\n }\ndiff --git a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java\nindex a1e1319..33149ae 100644\n--- a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java\n@@ -16,6 +16,7 @@ import io.camunda.zeebe.scheduler.future.ActorFuture;\n import 
io.camunda.zeebe.scheduler.future.CompletableActorFuture;\n import io.camunda.zeebe.snapshots.PersistedSnapshotStore;\n import java.nio.file.Path;\n+import java.util.List;\n import java.util.function.Predicate;\n import org.slf4j.Logger;\n import org.slf4j.LoggerFactory;\n@@ -31,11 +32,13 @@ public final class BackupService extends Actor implements BackupManager {\n private final PersistedSnapshotStore snapshotStore;\n private final Path segmentsDirectory;\n private final Predicate<Path> isSegmentsFile;\n+ private List<Integer> partitionMembers;\n \n public BackupService(\n final int nodeId,\n final int partitionId,\n final int numberOfPartitions,\n+ final List<Integer> partitionMembers,\n final PersistedSnapshotStore snapshotStore,\n final Predicate<Path> isSegmentsFile,\n final Path segmentsDirectory) {\n@@ -48,6 +51,7 @@ public final class BackupService extends Actor implements BackupManager {\n snapshotStore,\n segmentsDirectory,\n isSegmentsFile);\n+ this.partitionMembers = partitionMembers;\n }\n \n public BackupService(\n@@ -122,6 +126,12 @@ public final class BackupService extends Actor implements BackupManager {\n new UnsupportedOperationException(\"Not implemented\"));\n }\n \n+ @Override\n+ public void failInProgressBackup(final long lastCheckpointId) {\n+ internalBackupManager.failInProgressBackups(\n+ partitionId, lastCheckpointId, partitionMembers, actor);\n+ }\n+\n private BackupIdentifierImpl getBackupId(final long checkpointId) {\n return new BackupIdentifierImpl(nodeId, partitionId, checkpointId);\n }\ndiff --git a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java\nindex e462dd5..f6d76b6 100644\n--- a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java\n@@ -9,16 +9,23 @@ package io.camunda.zeebe.backup.management;\n \n import io.camunda.zeebe.backup.api.BackupIdentifier;\n import io.camunda.zeebe.backup.api.BackupStatus;\n+import io.camunda.zeebe.backup.api.BackupStatusCode;\n import io.camunda.zeebe.backup.api.BackupStore;\n+import io.camunda.zeebe.backup.common.BackupIdentifierImpl;\n+import io.camunda.zeebe.backup.processing.state.CheckpointState;\n import io.camunda.zeebe.scheduler.ConcurrencyControl;\n import io.camunda.zeebe.scheduler.future.ActorFuture;\n import io.camunda.zeebe.scheduler.future.CompletableActorFuture;\n+import java.util.Collection;\n import java.util.HashSet;\n import java.util.Set;\n import java.util.function.BiConsumer;\n import java.util.function.Consumer;\n+import org.slf4j.Logger;\n+import org.slf4j.LoggerFactory;\n \n final class BackupServiceImpl {\n+ private static final Logger LOG = LoggerFactory.getLogger(BackupServiceImpl.class);\n private final Set<InProgressBackup> backupsInProgress = new HashSet<>();\n private final BackupStore backupStore;\n private ConcurrencyControl concurrencyControl;\n@@ -138,4 +145,48 @@ final class BackupServiceImpl {\n }));\n return future;\n }\n+\n+ void failInProgressBackups(\n+ final int partitionId,\n+ final long lastCheckpointId,\n+ final Collection<Integer> brokers,\n+ final ConcurrencyControl executor) {\n+ if (lastCheckpointId != CheckpointState.NO_CHECKPOINT) {\n+ executor.run(\n+ () -> {\n+ final var backupIds =\n+ brokers.stream()\n+ .map(b -> new BackupIdentifierImpl(b, partitionId, lastCheckpointId))\n+ .toList();\n+ // Fail backups initiated by previous leaders\n+ 
backupIds.forEach(this::failInProgressBackup);\n+ });\n+ }\n+ }\n+\n+ private void failInProgressBackup(final BackupIdentifier backupId) {\n+ backupStore\n+ .getStatus(backupId)\n+ .thenAccept(\n+ status -> {\n+ if (status.statusCode() == BackupStatusCode.IN_PROGRESS) {\n+ LOG.debug(\n+ \"The backup {} initiated by previous leader is still in progress. Marking it as failed.\",\n+ backupId);\n+ backupStore\n+ .markFailed(backupId)\n+ .thenAccept(ignore -> LOG.trace(\"Marked backup {} as failed.\", backupId))\n+ .exceptionally(\n+ failed -> {\n+ LOG.debug(\"Failed to mark backup {} as failed\", backupId, failed);\n+ return null;\n+ });\n+ }\n+ })\n+ .exceptionally(\n+ error -> {\n+ LOG.debug(\"Failed to retrieve status of backup {}\", backupId);\n+ return null;\n+ });\n+ }\n }\ndiff --git a/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java b/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java\nindex c83fdc1..2899d4d 100644\n--- a/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java\n@@ -14,20 +14,24 @@ import io.camunda.zeebe.backup.processing.state.DbCheckpointState;\n import io.camunda.zeebe.engine.api.ProcessingResult;\n import io.camunda.zeebe.engine.api.ProcessingResultBuilder;\n import io.camunda.zeebe.engine.api.ProcessingScheduleService;\n+import io.camunda.zeebe.engine.api.ReadonlyStreamProcessorContext;\n import io.camunda.zeebe.engine.api.RecordProcessor;\n import io.camunda.zeebe.engine.api.RecordProcessorContext;\n+import io.camunda.zeebe.engine.api.StreamProcessorLifecycleAware;\n import io.camunda.zeebe.engine.api.TypedRecord;\n import io.camunda.zeebe.protocol.impl.record.value.management.CheckpointRecord;\n import io.camunda.zeebe.protocol.record.ValueType;\n import io.camunda.zeebe.protocol.record.intent.management.CheckpointIntent;\n import java.time.Duration;\n+import java.util.List;\n import java.util.Set;\n import java.util.concurrent.CopyOnWriteArraySet;\n import org.slf4j.Logger;\n import org.slf4j.LoggerFactory;\n \n /** Process and replays records related to Checkpoint. */\n-public final class CheckpointRecordsProcessor implements RecordProcessor {\n+public final class CheckpointRecordsProcessor\n+ implements RecordProcessor, StreamProcessorLifecycleAware {\n \n private static final Logger LOG = LoggerFactory.getLogger(CheckpointRecordsProcessor.class);\n \n@@ -62,6 +66,8 @@ public final class CheckpointRecordsProcessor implements RecordProcessor {\n checkpointListeners.forEach(\n listener -> listener.onNewCheckpointCreated(checkpointState.getCheckpointId()));\n }\n+\n+ recordProcessorContext.addLifecycleListeners(List.of(this));\n }\n \n @Override\n@@ -126,4 +132,12 @@ public final class CheckpointRecordsProcessor implements RecordProcessor {\n });\n }\n }\n+\n+ @Override\n+ public void onRecovered(final ReadonlyStreamProcessorContext context) {\n+ // After a leader change, the new leader will not continue taking the backup initiated by\n+ // previous leader. 
So mark them as failed, so that the users do not wait forever for it to be\n+ // completed.\n+ backupManager.failInProgressBackup(checkpointState.getCheckpointId());\n+ }\n }\ndiff --git a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java\nindex 3424e19..591e17b 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java\n@@ -7,6 +7,7 @@\n */\n package io.camunda.zeebe.broker.system.partitions.impl.steps;\n \n+import io.atomix.cluster.MemberId;\n import io.atomix.raft.RaftServer.Role;\n import io.camunda.zeebe.backup.api.BackupManager;\n import io.camunda.zeebe.backup.management.BackupService;\n@@ -17,6 +18,7 @@ import io.camunda.zeebe.journal.file.SegmentFile;\n import io.camunda.zeebe.scheduler.future.ActorFuture;\n import io.camunda.zeebe.scheduler.future.CompletableActorFuture;\n import java.nio.file.Path;\n+import java.util.List;\n import java.util.function.Predicate;\n \n public final class BackupServiceTransitionStep implements PartitionTransitionStep {\n@@ -69,6 +71,7 @@ public final class BackupServiceTransitionStep implements PartitionTransitionSte\n context.getNodeId(),\n context.getPartitionId(),\n context.getBrokerCfg().getCluster().getPartitionsCount(),\n+ getPartitionMembers(context),\n context.getPersistedSnapshotStore(),\n isSegmentsFile,\n context.getRaftPartition().dataDirectory().toPath());\n@@ -90,4 +93,12 @@ public final class BackupServiceTransitionStep implements PartitionTransitionSte\n });\n return installed;\n }\n+\n+ // Brokers which are members of this partition's replication group\n+ private static List<Integer> getPartitionMembers(final PartitionTransitionContext context) {\n+ return context.getRaftPartition().members().stream()\n+ .map(MemberId::id)\n+ .map(Integer::parseInt)\n+ .toList();\n+ }\n }\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\nindex 0854323..bfc7b7e 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n@@ -47,7 +47,8 @@ final class DiskSpaceRecoveryIT {\n .withZeebeData(volume)\n .withEnv(\"ZEEBE_BROKER_DATA_LOGSEGMENTSIZE\", \"1MB\")\n .withEnv(\"ZEEBE_BROKER_NETWORK_MAXMESSAGESIZE\", \"1MB\")\n- .withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.5\");\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"10MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"1MB\");\n \n private ZeebeClient client;\n \n@@ -127,7 +128,9 @@ final class DiskSpaceRecoveryIT {\n ContainerEngine.builder()\n .withDebugReceiverPort(SocketUtil.getNextAddress().getPort())\n .withContainer(\n- container.withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.0001\"))\n+ container\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"16MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"10MB\"))\n .build();\n \n @BeforeEach\n", "diff --git a/server/src/routes/course/index.ts b/server/src/routes/course/index.ts\nindex 557f5fb..bc0e490 100644\n--- 
a/server/src/routes/course/index.ts\n+++ b/server/src/routes/course/index.ts\n@@ -209,7 +209,7 @@ function addStudentApi(router: Router, logger: ILogger) {\n router.post('/student/:githubId/status', ...mentorValidators, updateStudentStatus(logger));\n router.post('/student/:githubId/status-self', courseGuard, selfUpdateStudentStatus(logger));\n router.get('/student/:githubId/score', courseGuard, getScoreByStudent(logger));\n- router.post('/student/:githubId/certificate', courseManagerGuard, ...validators, postStudentCertificate(logger));\n+ router.post('/student/:githubId/certificate', courseManagerGuard, validateGithubId, postStudentCertificate(logger));\n \n router.get('/students', courseSupervisorGuard, getStudents(logger));\n router.get('/students/csv', courseSupervisorGuard, getStudentsCsv(logger));\n", "diff --git a/docs/content/Caching/Using-Pre-Aggregations.md b/docs/content/Caching/Using-Pre-Aggregations.md\nindex 7882a25..a927241 100644\n--- a/docs/content/Caching/Using-Pre-Aggregations.md\n+++ b/docs/content/Caching/Using-Pre-Aggregations.md\n@@ -65,8 +65,8 @@ In development mode, Cube.js enables background refresh by default and will\n refresh all pre-aggregations marked with the\n [`scheduledRefresh`](/pre-aggregations#scheduled-refresh) parameter.\n \n-Please consult the [Production Checklist][ref-production-checklist-refresh] for\n-best practices on running background refresh in production environments.\n+Please consult the [Production Checklist][ref-prod-list-refresh] for best\n+practices on running background refresh in production environments.\n \n ```js\n cube(`Orders`, {\n@@ -193,10 +193,20 @@ CUBEJS_EXT_DB_TYPE=<SUPPORTED_DB_TYPE_HERE>\n \n <!-- prettier-ignore-start -->\n [[warning |]]\n-| Please be aware of the limitations when using internal and external (outside of Cube Store) pre-aggregations.\n+| Please be aware of the limitations when using internal and external (outside\n+| of Cube Store) pre-aggregations.\n <!-- prettier-ignore-end -->\n \n-![](https://raw.githubusercontent.com/cube-js/cube.js/master/docs/content/Caching/pre-aggregations.png)\n+<div\n+ style=\"text-align: center\"\n+>\n+ <img\n+ alt=\"Internal vs External vs External with Cube Store diagram\"\n+ src=\"https://raw.githubusercontent.com/cube-js/cube.js/master/docs/content/Caching/pre-aggregations.png\"\n+ style=\"border: none\"\n+ width=\"100%\"\n+ />\n+</div>\n \n #### Some known limitations when using Postgres/MySQL as a storage layer listed below.\n \n@@ -245,15 +255,75 @@ slow to return results.\n (such as AWS Athena and BigQuery). Repeatedly querying for this data can easily\n rack up costs.\n \n+## Optimizing Pre-Aggregation Build Times\n+\n+<!-- prettier-ignore-start -->\n+[[info | ]]\n+| For ideal performance, pre-aggregations should be built using a dedicated\n+| Refresh Worker. [See here for more details][ref-prod-list-refresh].\n+<!-- prettier-ignore-end -->\n+\n+By default, Cube.js will use the source database as a temporary staging area for\n+writing pre-aggregations to determine column types. The data is loaded back into\n+memory before writing them to Cube Store (or an external database).\n+\n+![](build-regular.png)\n+\n+If the dataset is large (more than 100k rows), then Cube.js can face issues when\n+the Node runtime runs out of memory.\n+\n+### Batching\n+\n+Batching is a more performant strategy where Cube.js sends compressed CSVs for\n+Cube Store to ingest.\n+\n+![](build-batching.png)\n+\n+The performance scales to the amount of memory available on the Cube.js\n+instance. 
Support is currently available for:\n+\n+- [AWS Athena][ref-connect-db-athena] (coming soon)\n+- [AWS Redshift][ref-connect-db-redshift]\n+- [BigQuery][ref-connect-db-bigquery]\n+- [MySQL][ref-connect-db-mysql]\n+- [Postgres][ref-connect-db-postgres]\n+\n+### Export bucket\n+\n+When dealing with larger pre-aggregations (more than 100k rows), performance can\n+be significantly improved by using an export bucket. This allows the source\n+database to persist data directly into cloud storage, which is then loaded into\n+Cube Store in parallel:\n+\n+![](build-export-bucket.png)\n+\n+Export buckets are currently supported for the following databases:\n+\n+- [AWS Athena][ref-connect-db-athena] (coming soon)\n+- [AWS Redshift][ref-connect-db-redshift]\n+- [BigQuery][ref-connect-db-bigquery]\n+- [Snowflake][ref-connect-db-snowflake]\n+\n+When using cloud storage, it is important to correctly configure any data\n+retention policies to clean up the data in the export bucket as Cube.js does not\n+currently manage this. For most use-cases, 1 day is sufficient.\n+\n [wiki-partitioning]: https://en.wikipedia.org/wiki/Partition_(database)\n+[ref-config-connect-db]: /connecting-to-the-database\n+[ref-config-env]: /reference/environment-variables#cube-store\n+[ref-connect-db-athena]: /connecting-to-the-database#notes-aws-athena\n+[ref-connect-db-redshift]: /connecting-to-the-database#notes-aws-redshift\n+[ref-connect-db-bigquery]: /connecting-to-the-database#notes-google-big-query\n+[ref-connect-db-mysql]: /connecting-to-the-database#notes-my-sql\n+[ref-connect-db-postgres]: /connecting-to-the-database#notes-aws-rds-postgres\n+[ref-connect-db-snowflake]: /connecting-to-the-database#notes-snowflake\n [ref-schema-timedimension]: /types-and-formats#dimensions-types-time\n [ref-preaggs]: /pre-aggregations\n [ref-preagg-sched-refresh]: /pre-aggregations#scheduled-refresh\n [ref-preagg-time-part]: /pre-aggregations#rollup-time-partitioning\n [ref-preagg-segment-part]: /pre-aggregations#rollup-segment-partitioning\n [ref-preaggs-refresh-key]: /pre-aggregations#refresh-key\n+[ref-prod-list-refresh]: /deployment/production-checklist#set-up-refresh-worker\n [ref-config-extdbtype]: /config#options-reference-external-db-type\n [ref-config-driverfactory]: /config#options-reference-driver-factory\n [ref-config-extdriverfactory]: /config#options-reference-external-driver-factory\n-[ref-production-checklist-refresh]:\n- /deployment/production-checklist#set-up-refresh-worker\ndiff --git a/docs/content/Caching/build-batching.png b/docs/content/Caching/build-batching.png\nnew file mode 100755\nindex 0000000..d1e28b3\nBinary files /dev/null and b/docs/content/Caching/build-batching.png differ\ndiff --git a/docs/content/Caching/build-export-bucket.png b/docs/content/Caching/build-export-bucket.png\nnew file mode 100755\nindex 0000000..7da2425\nBinary files /dev/null and b/docs/content/Caching/build-export-bucket.png differ\ndiff --git a/docs/content/Caching/build-regular.png b/docs/content/Caching/build-regular.png\nnew file mode 100644\nindex 0000000..af4c3a2\nBinary files /dev/null and b/docs/content/Caching/build-regular.png differ\ndiff --git a/docs/content/Configuration/Connecting-to-the-Database.md b/docs/content/Configuration/Connecting-to-the-Database.md\nindex 321518f..a16ccc4 100644\n--- a/docs/content/Configuration/Connecting-to-the-Database.md\n+++ b/docs/content/Configuration/Connecting-to-the-Database.md\n@@ -49,20 +49,21 @@ CUBEJS_API_SECRET=secret\n The table below shows which environment variables are used for 
different\n databases:\n \n-| Database | Credentials |\n-| ------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n-| PostgreSQL, MySQL, AWS Redshift, Hive/SparkSQL, Oracle | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n-| MS SQL | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_DOMAIN` |\n-| ClickHouse | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_CLICKHOUSE_READONLY` |\n-| AWS Athena | `CUBEJS_AWS_KEY`, `CUBEJS_AWS_SECRET`, `CUBEJS_AWS_REGION`, `CUBEJS_AWS_S3_OUTPUT_LOCATION` |\n-| Google BigQuery | `CUBEJS_DB_BQ_PROJECT_ID`, `CUBEJS_DB_BQ_KEY_FILE or CUBEJS_DB_BQ_CREDENTIALS`, `CUBEJS_DB_BQ_LOCATION`, `CUBEJS_DB_BQ_EXPORT_BUCKET` |\n-| MongoDB | `CUBEJS_DB_HOST`, `CUBEJS_DB_NAME`, `CUBEJS_DB_PORT`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_SSL_CA`, `CUBEJS_DB_SSL_CERT`, `CUBEJS_DB_SSL_CIPHERS`, `CUBEJS_DB_SSL_PASSPHRASE` |\n-| Snowflake | `CUBEJS_DB_SNOWFLAKE_ACCOUNT`, `CUBEJS_DB_SNOWFLAKE_REGION`, `CUBEJS_DB_SNOWFLAKE_WAREHOUSE`, `CUBEJS_DB_SNOWFLAKE_ROLE`, `CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` |\n-| Presto | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_CATALOG`, `CUBEJS_DB_SCHEMA`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n-| Druid | `CUBEJS_DB_URL`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL` |\n-| SQLite | `CUBEJS_DB_NAME` |\n-| Databricks | `CUBEJS_DB_NAME`, `CUBEJS_DB_DATABRICKS_URL` |\n-| Elasticsearch | `CUBEJS_DB_URL`, `CUBEJS_DB_ELASTIC_QUERY_FORMAT`,`CUBEJS_DB_ELASTIC_OPENDISTRO` ,`CUBEJS_DB_ELASTIC_APIKEY_ID`,`CUBEJS_DB_ELASTIC_APIKEY_KEY` |\n+| Database | Credentials |\n+| ---------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n+| PostgreSQL, MySQL, Hive/SparkSQL, Oracle | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n+| AWS Redshift | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, |\n+| MS SQL | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_DOMAIN` |\n+| ClickHouse | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_CLICKHOUSE_READONLY` |\n+| AWS Athena | `CUBEJS_AWS_KEY`, `CUBEJS_AWS_SECRET`, `CUBEJS_AWS_REGION`, `CUBEJS_AWS_S3_OUTPUT_LOCATION` |\n+| Google BigQuery | `CUBEJS_DB_BQ_PROJECT_ID`, `CUBEJS_DB_BQ_KEY_FILE or CUBEJS_DB_BQ_CREDENTIALS`, `CUBEJS_DB_BQ_LOCATION`, |\n+| MongoDB | `CUBEJS_DB_HOST`, `CUBEJS_DB_NAME`, `CUBEJS_DB_PORT`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_SSL_CA`, 
`CUBEJS_DB_SSL_CERT`, `CUBEJS_DB_SSL_CIPHERS`, `CUBEJS_DB_SSL_PASSPHRASE` |\n+| Snowflake | `CUBEJS_DB_SNOWFLAKE_ACCOUNT`, `CUBEJS_DB_SNOWFLAKE_REGION`, `CUBEJS_DB_SNOWFLAKE_WAREHOUSE`, `CUBEJS_DB_SNOWFLAKE_ROLE`, `CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` |\n+| Presto | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_CATALOG`, `CUBEJS_DB_SCHEMA`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n+| Druid | `CUBEJS_DB_URL`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL` |\n+| SQLite | `CUBEJS_DB_NAME` |\n+| Databricks | `CUBEJS_DB_NAME`, `CUBEJS_DB_DATABRICKS_URL` |\n+| Elasticsearch | `CUBEJS_DB_URL`, `CUBEJS_DB_ELASTIC_QUERY_FORMAT`,`CUBEJS_DB_ELASTIC_OPENDISTRO` ,`CUBEJS_DB_ELASTIC_APIKEY_ID`,`CUBEJS_DB_ELASTIC_APIKEY_KEY` |\n \n ## Multiple Databases\n \n@@ -195,18 +196,25 @@ You can learn more about acquiring Google BigQuery credentials\n [here][link-bigquery-getting-started] and [here][link-bigquery-credentials].\n \n You can set the dataset location using the `CUBEJS_DB_BQ_LOCATION` environment\n-variable.\n+variable. All supported regions [can be found\n+here][link-bigquery-regional-locations].\n \n ```dotenv\n CUBEJS_DB_BQ_LOCATION=us-central1\n ```\n \n-You can find more supported regions [here][link-bigquery-regional-locations].\n+#### Configuring an export bucket\n \n-If your pre-aggregations dataset is too big to fit in memory, we **strongly**\n-recommend configuring `CUBEJS_DB_BQ_EXPORT_BUCKET`. This will allow Cube.js to\n-materialize results on an \"export\" bucket which are then loaded into BigQuery,\n-providing better performance.\n+<!-- prettier-ignore-start -->\n+[[warning |]]\n+| BigQuery only supports using Google Cloud Storage for export buckets.\n+<!-- prettier-ignore-end -->\n+\n+##### Google Cloud Storage\n+\n+For [improved pre-aggregation performance with large\n+datasets][ref-caching-large-preaggs], enable the export bucket functionality by\n+configuring Cube.js with the following environment variables:\n \n <!-- prettier-ignore-start -->\n [[info |]]\n@@ -216,7 +224,8 @@ providing better performance.\n <!-- prettier-ignore-end -->\n \n ```dotenv\n-CUBEJS_DB_BQ_EXPORT_BUCKET=export_data_58148478376\n+CUBEJS_DB_EXPORT_BUCKET=export_data_58148478376\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=gcp\n ```\n \n ### MSSQL\n@@ -279,6 +288,73 @@ To connect to a Elasticsearch database, use `CUBEJS_DB_URL` with the username\n and password embedded in the URL, if required. 
If you're not using Elastic\n Cloud, you **must** specify `CUBEJS_DB_ELASTIC_QUERY_FORMAT`.\n \n+### AWS Redshift\n+\n+#### Configuring an export bucket\n+\n+<!-- prettier-ignore-start -->\n+[[warning |]]\n+| AWS Redshift only supports using AWS S3 for export buckets.\n+<!-- prettier-ignore-end -->\n+\n+##### AWS S3\n+\n+For [improved pre-aggregation performance with large\n+datasets][ref-caching-large-preaggs], enable the export bucket functionality by\n+configuring Cube.js with the following environment variables:\n+\n+<!-- prettier-ignore-start -->\n+[[info |]]\n+| Ensure the AWS credentials are correctly configured in IAM to allow reads and\n+| writes to the export bucket.\n+<!-- prettier-ignore-end -->\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=s3\n+CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3\n+CUBEJS_DB_EXPORT_BUCKET_AWS_KEY=<AWS_KEY>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>\n+```\n+\n+### Snowflake\n+\n+#### Configuring an export bucket\n+\n+Snowflake supports using both AWS S3 and Google Cloud Storage for export bucket\n+functionality.\n+\n+##### AWS S3\n+\n+<!-- prettier-ignore-start -->\n+[[info |]]\n+| Ensure the AWS credentials are correctly configured in IAM to allow reads and\n+| writes to the export bucket.\n+<!-- prettier-ignore-end -->\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=s3\n+CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3\n+CUBEJS_DB_EXPORT_BUCKET_AWS_KEY=<AWS_KEY>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>\n+```\n+\n+##### Google Cloud Storage\n+\n+Before configuring Cube.js, an [integration must be created and configured in\n+Snowflake][link-snowflake-gcs-integration]. Take note of the integration name\n+(`gcs_int` from the example link) as you'll need it to configure Cube.js.\n+\n+Once the Snowflake integration is set up, configure Cube.js using the following:\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET=snowflake-export-bucket\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=gcp\n+CUBEJS_DB_EXPORT_GCS_CREDENTIALS=<BASE64_ENCODED_SERVICE_CREDENTIALS_JSON>\n+CUBEJS_DB_EXPORT_INTEGRATION=gcs_int\n+```\n+\n [link-java-guide]:\n https://github.com/cube-js/cube.js/blob/master/packages/cubejs-jdbc-driver/README.md#java-installation\n [link-cubejs-driver-guide]:\n@@ -300,8 +376,11 @@ Cloud, you **must** specify `CUBEJS_DB_ELASTIC_QUERY_FORMAT`.\n https://console.cloud.google.com/apis/credentials/serviceaccountkey\n [link-heroku-postgres-issue]:\n https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl\n+[link-snowflake-gcs-integration]:\n+ https://docs.snowflake.com/en/user-guide/data-load-gcs-config.html\n+[link-bigquery-regional-locations]:\n+ https://cloud.google.com/bigquery/docs/locations#regional-locations\n [ref-cubejs-cli]: /using-the-cubejs-cli\n [ref-enabling-ssl]: #enabling-ssl\n [ref-env-var]: /reference/environment-variables#database-connection\n-[link-bigquery-regional-locations]:\n- https://cloud.google.com/bigquery/docs/locations#regional-locations\n+[ref-caching-large-preaggs]: /using-pre-aggregations#large-pre-aggregations\ndiff --git a/docs/content/Configuration/Environment-Variables-Reference.md b/docs/content/Configuration/Environment-Variables-Reference.md\nindex 692d2c7..6888697 100644\n--- a/docs/content/Configuration/Environment-Variables-Reference.md\n+++ b/docs/content/Configuration/Environment-Variables-Reference.md\n@@ -124,6 +124,18 @@ databases [in this guide][link-connecting-to-db].\n | 
`CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` | Snowflake | The password for the private RSA key. Only required for encrypted keys | A valid password for the encrypted private RSA key |\n | `CUBEJS_DB_DATABRICKS_URL` | Databricks | The URL for a JDBC connection | A valid JDBC URL |\n \n+## Export Bucket\n+\n+| Environment variable | Description | Possible Values |\n+| ------------------------------------ | -------------------------------------------------------------------------------------------- | ---------------------------------------------------------------- |\n+| `CUBEJS_DB_EXPORT_BUCKET` | The name of a bucket in cloud storage | `exports-20210505` |\n+| `CUBEJS_DB_EXPORT_BUCKET_TYPE` | The cloud provider where the bucket is hosted | `gcs`, `s3` |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_KEY` | The AWS Access Key ID to use for the export bucket | A valid AWS Access Key ID |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET` | The AWS Secret Access Key to use for the export bucket | A valid AWS Secret Access Key |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_REGION` | The AWS region of the export bucket | [A valid AWS region][link-aws-regions] |\n+| `CUBEJS_DB_EXPORT_GCS_CREDENTIALS` | A Base64 encoded JSON key file for connecting to Google Cloud | A valid Google Cloud JSON key file encoded as a Base64 string |\n+| `CUBEJS_DB_EXPORT_INTEGRATION` | The name of the integration used in the database. Only required when using Snowflake and GCS | A valid string matching the name of the integration in Snowflake |\n+\n ## Cube Store\n \n | Environment variable | Description | Possible Values |\n"]
5
["d2709cab63295109dcd1a49f57da9418110e9044", "fb83ef33b699fd966486a922ba1ade4cf8e55858", "672cd2b9775fb6dac2d522cb3f4469db47c0556b", "33c25b2f59c931a7f4af994365522221a7821dca", "81f37be838d5e3af738908b1bcbf59fea2b45989"]
["refactor", "feat", "test", "fix", "docs"]
add descriptions to buttons on hover,correctly read new last flushed index,updated test to use rows for action items references #279,fixing deploying to kubernetes Signed-off-by: Rajesh Rajendran <[email protected]>,simplify aggregate_columns
["diff --git a/benchmarks/main.mjs b/benchmarks/main.mjs\nindex 0c2dc6b..e2f79d4 100644\n--- a/benchmarks/main.mjs\n+++ b/benchmarks/main.mjs\n@@ -65,8 +65,9 @@ const vnode = () =>\n },\n style: style({ margin: '5px' }),\n disabled,\n+ title: suite.name.split(' | ')[1],\n },\n- [suite.name],\n+ [suite.name.split(' | ')[0]],\n ),\n ),\n m(\ndiff --git a/benchmarks/suites/appendManyRowsToLargeTable.mjs b/benchmarks/suites/appendManyRowsToLargeTable.mjs\nindex e6a034e..7e34ca3 100644\n--- a/benchmarks/suites/appendManyRowsToLargeTable.mjs\n+++ b/benchmarks/suites/appendManyRowsToLargeTable.mjs\n@@ -31,7 +31,9 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('append many rows to large table');\n+const suite = new benchmark.Suite(\n+ 'append many rows to large table | appending 1,000 to a table of 10,000 rows.',\n+);\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/clearRows.mjs b/benchmarks/suites/clearRows.mjs\nindex ad47036..2a7711b 100644\n--- a/benchmarks/suites/clearRows.mjs\n+++ b/benchmarks/suites/clearRows.mjs\n@@ -27,7 +27,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(row);\n });\n \n-const suite = new benchmark.Suite('clear rows');\n+const suite = new benchmark.Suite('clear rows | clearing a table with 1,000 rows');\n \n const hoistedVNode = m('table', undefined, [], VFlags.NO_CHILDREN);\n \ndiff --git a/benchmarks/suites/createManyRows.mjs b/benchmarks/suites/createManyRows.mjs\nindex 578f511..96c7b02 100644\n--- a/benchmarks/suites/createManyRows.mjs\n+++ b/benchmarks/suites/createManyRows.mjs\n@@ -7,7 +7,7 @@ import benchmark from '../benchmark';\n import { m, patch } from '../../src/index';\n import { buildData } from '../data';\n \n-const suite = new benchmark.Suite('create many rows');\n+const suite = new benchmark.Suite('create many rows | creating 10,000 rows');\n \n const hoistedVNode = m(\n 'div',\ndiff --git a/benchmarks/suites/createRows.mjs b/benchmarks/suites/createRows.mjs\nindex bfcc876..4d9ff57 100644\n--- a/benchmarks/suites/createRows.mjs\n+++ b/benchmarks/suites/createRows.mjs\n@@ -7,7 +7,7 @@ import benchmark from '../benchmark';\n import { m, patch } from '../../src/index';\n import { buildData } from '../data';\n \n-const suite = new benchmark.Suite('create rows');\n+const suite = new benchmark.Suite('create rows | creating 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/partialUpdate.mjs b/benchmarks/suites/partialUpdate.mjs\nindex 55948a9..c5f1de3 100644\n--- a/benchmarks/suites/partialUpdate.mjs\n+++ b/benchmarks/suites/partialUpdate.mjs\n@@ -34,7 +34,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('partial update');\n+const suite = new benchmark.Suite('partial update | updating every 10th row for 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/removeRow.mjs b/benchmarks/suites/removeRow.mjs\nindex aeb1e9a..31c7599 100644\n--- a/benchmarks/suites/removeRow.mjs\n+++ b/benchmarks/suites/removeRow.mjs\n@@ -30,7 +30,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('remove row');\n+const suite = new benchmark.Suite('remove row | removing one row');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/replaceAllRows.mjs b/benchmarks/suites/replaceAllRows.mjs\nindex 9555ae4..7001667 100644\n--- a/benchmarks/suites/replaceAllRows.mjs\n+++ 
b/benchmarks/suites/replaceAllRows.mjs\n@@ -41,7 +41,7 @@ data2.forEach(({ id, label }) => {\n \n shuffleArray(data2);\n \n-const suite = new benchmark.Suite('replace all rows');\n+const suite = new benchmark.Suite('replace all rows | updating all 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/selectRow.mjs b/benchmarks/suites/selectRow.mjs\nindex 76be216..de69359 100644\n--- a/benchmarks/suites/selectRow.mjs\n+++ b/benchmarks/suites/selectRow.mjs\n@@ -30,7 +30,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('select row');\n+const suite = new benchmark.Suite('select row | highlighting a selected row');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/swapRows.mjs b/benchmarks/suites/swapRows.mjs\nindex 2a91e74..ce52036 100644\n--- a/benchmarks/suites/swapRows.mjs\n+++ b/benchmarks/suites/swapRows.mjs\n@@ -36,7 +36,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('swap rows');\n+const suite = new benchmark.Suite('swap rows | swap 2 rows for table with 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\n", "diff --git a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\nindex 69b06b6..a4fcb77 100644\n--- a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\n+++ b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\n@@ -112,7 +112,7 @@ public class PartitionRestoreService {\n SegmentedJournal.builder()\n .withDirectory(dataDirectory.toFile())\n .withName(partition.name())\n- .withLastWrittenIndex(-1)\n+ .withLastFlushedIndex(-1)\n .build()) {\n \n resetJournal(checkpointPosition, journal);\n", "diff --git a/ionic/components/card/test/advanced/main.html b/ionic/components/card/test/advanced/main.html\nindex 7c56a7d..c19ea12 100644\n--- a/ionic/components/card/test/advanced/main.html\n+++ b/ionic/components/card/test/advanced/main.html\n@@ -19,16 +19,20 @@\n </p>\n </ion-card-content>\n \n- <ion-item>\n- <button clear item-left>\n- <icon star></icon>\n- Star\n- </button>\n- <button clear item-right class=\"activated\">\n- <icon share></icon>\n- Share.activated\n- </button>\n- </ion-item>\n+ <ion-row no-padding>\n+ <ion-col>\n+ <button clear small>\n+ <icon star></icon>\n+ Star\n+ </button>\n+ </ion-col>\n+ <ion-col text-right>\n+ <button clear small class=\"activated\">\n+ <icon share></icon>\n+ Share.activated\n+ </button>\n+ </ion-col>\n+ </ion-row>\n \n </ion-card>\n \n@@ -51,19 +55,24 @@\n <p>Hello. 
I am a paragraph.</p>\n </ion-card-content>\n \n- <ion-item>\n- <button clear item-left danger class=\"activated\">\n- <icon star></icon>\n- Favorite.activated\n- </button>\n- <button clear item-left danger>\n- <icon musical-notes></icon>\n- Listen\n- </button>\n- <ion-note item-right>\n- Right Note\n- </ion-note>\n- </ion-item>\n+ <ion-row center no-padding>\n+ <ion-col width-75>\n+ <button clear small danger class=\"activated\">\n+ <icon star></icon>\n+ Favorite.activated\n+ </button>\n+ <button clear small danger>\n+ <icon musical-notes></icon>\n+ Listen\n+ </button>\n+ </ion-col>\n+ <ion-col text-right>\n+ <button clear small>\n+ <icon share></icon>\n+ Share\n+ </button>\n+ </ion-col>\n+ </ion-row>\n </ion-card>\n \n <ion-card>\n@@ -76,20 +85,27 @@\n This card was breaking the border radius.\n </ion-card-content>\n \n- <ion-item>\n- <button clear item-left dark>\n- <icon star></icon>\n- Favorite\n- </button>\n- <button clear item-right dark>\n- <icon musical-notes></icon>\n- Listen\n- </button>\n- <button clear item-right dark>\n- <icon share-alt></icon>\n- Share\n- </button>\n- </ion-item>\n+ <ion-row text-center no-padding>\n+ <ion-col>\n+ <button clear small dark>\n+ <icon star></icon>\n+ Favorite\n+ </button>\n+ </ion-col>\n+\n+ <ion-col>\n+ <button clear small dark>\n+ <icon musical-notes></icon>\n+ Listen\n+ </button>\n+ </ion-col>\n+ <ion-col>\n+ <button clear small dark>\n+ <icon share-alt></icon>\n+ Share\n+ </button>\n+ </ion-col>\n+ </ion-row>\n \n </ion-card>\n \n", "diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml\nindex 7e42967..77e4abf 100644\n--- a/.github/workflows/frontend.yaml\n+++ b/.github/workflows/frontend.yaml\n@@ -22,26 +22,22 @@ jobs:\n ${{ runner.OS }}-build-\n ${{ runner.OS }}-\n \n+ - uses: azure/k8s-set-context@v1\n+ with:\n+ method: kubeconfig\n+ kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.\n+ id: setcontext\n - name: Install\n run: npm install\n \n- - name: Build\n- run: npm run build:staging\n- env:\n- ENVIRONMENT: staging\n-\n- - name: Deploy\n- env:\n- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}\n- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}\n- AWS_REGION: eu-central-1\n- AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}\n+ - name: Build and deploy\n run: |\n- aws configure set default.s3.signature_version s3v4\n- aws --endpoint-url https://${{secrets.DOMAIN_NAME}}/frontend/ s3 cp \\\n- --recursive \\\n- --region \"$AWS_REGION\" \\\n- public s3://$AWS_S3_BUCKET_NAME\n+ cd frontend\n+ bash build.sh\n+ cp -arl public frontend\n+ minio_pod=$(kubectl get po -n db -l app.kubernetes.io/name=minio -n db --output custom-columns=name:.metadata.name | tail -n+2)\n+ kubectl -n db cp frontend $minio_pod:/data/\n+ rm -rf frontend\n \n # - name: Debug Job\n # if: ${{ failure() }}\n", "diff --git a/read_buffer/src/row_group.rs b/read_buffer/src/row_group.rs\nindex 19b0501..f2fbbe3 100644\n--- a/read_buffer/src/row_group.rs\n+++ b/read_buffer/src/row_group.rs\n@@ -924,38 +924,30 @@ impl RowGroup {\n },\n };\n \n- // References to the columns to be used as input for producing the\n- // output aggregates. 
Also returns the required aggregate type.\n- let input_aggregate_columns = dst\n- .schema\n- .aggregate_columns\n- .iter()\n- .map(|(col_type, agg_type, _)| (self.column_by_name(col_type.as_str()), *agg_type))\n- .collect::<Vec<_>>();\n-\n- let mut output_aggregate_columns = dst\n+ dst.aggregate_cols = dst\n .schema\n .aggregate_columns\n .iter()\n- .map(|(_, agg_type, data_type)| AggregateVec::from((agg_type, data_type)))\n- .collect::<Vec<_>>();\n+ .map(|(col_type, agg_type, data_type)| {\n+ let col = self.column_by_name(col_type.as_str()); // input aggregate column\n+ let mut agg_vec = AggregateVec::from((agg_type, data_type));\n \n- for (i, (col, agg_type)) in input_aggregate_columns.iter().enumerate() {\n- match agg_type {\n- AggregateType::Count => {\n- let value = Value::Scalar(Scalar::U64(col.count(&row_ids) as u64));\n- output_aggregate_columns[i].push(value);\n- }\n- AggregateType::First => unimplemented!(\"First not yet implemented\"),\n- AggregateType::Last => unimplemented!(\"Last not yet implemented\"),\n- AggregateType::Min => output_aggregate_columns[i].push(col.min(&row_ids)),\n- AggregateType::Max => output_aggregate_columns[i].push(col.max(&row_ids)),\n- AggregateType::Sum => {\n- output_aggregate_columns[i].push(Value::Scalar(col.sum(&row_ids)))\n+ // produce single aggregate for the input column subject to a\n+ // predicate filter.\n+ match agg_type {\n+ AggregateType::Count => {\n+ let value = Value::Scalar(Scalar::U64(col.count(&row_ids) as u64));\n+ agg_vec.push(value);\n+ }\n+ AggregateType::First => unimplemented!(\"First not yet implemented\"),\n+ AggregateType::Last => unimplemented!(\"Last not yet implemented\"),\n+ AggregateType::Min => agg_vec.push(col.min(&row_ids)),\n+ AggregateType::Max => agg_vec.push(col.max(&row_ids)),\n+ AggregateType::Sum => agg_vec.push(Value::Scalar(col.sum(&row_ids))),\n }\n- }\n- }\n- dst.aggregate_cols = output_aggregate_columns;\n+ agg_vec\n+ })\n+ .collect::<Vec<_>>();\n }\n \n /// Given the predicate (which may be empty), determine a set of rows\n"]
5
["d8d0ba8ea17ed43a04f90213851d2f27056d8cf0", "5ffc5794808647de14f945141692be26ad143006", "19feaea1885eb015759b5c7a5d785521f2b8a212", "3f2eec37f76c1ad9408e423e49fe5bfe3e17d943", "300ac2e411ef0ebfe1441d42cc88787116a9afa4"]
["feat", "fix", "test", "ci", "refactor"]
skip ruff format in pre-commit ci runner,add user role enum Signed-off-by: Braks <[email protected]>,add jackson dependencies for zb-bpmn-model,remove duplicated variables,Handle different events.
["diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\nindex 6193d96..4ba39d6 100644\n--- a/.pre-commit-config.yaml\n+++ b/.pre-commit-config.yaml\n@@ -9,6 +9,7 @@ ci:\n - nixpkgs-fmt\n - prettier\n - ruff\n+ - ruff-format\n - shellcheck\n - shfmt\n - statix\n", "diff --git a/packages/nc-gui-v2/lib/enums.ts b/packages/nc-gui-v2/lib/enums.ts\nindex e87b69a..c6751a3 100644\n--- a/packages/nc-gui-v2/lib/enums.ts\n+++ b/packages/nc-gui-v2/lib/enums.ts\n@@ -1,3 +1,9 @@\n+export enum Role {\n+ Super = 'super',\n+ Admin = 'admin',\n+ User = 'user',\n+}\n+\n export enum Language {\n de = 'Deutsch',\n en = 'English',\ndiff --git a/packages/nc-gui-v2/lib/types.ts b/packages/nc-gui-v2/lib/types.ts\nindex bf152c4..dd8a1ce 100644\n--- a/packages/nc-gui-v2/lib/types.ts\n+++ b/packages/nc-gui-v2/lib/types.ts\n@@ -1,11 +1,12 @@\n import type { ComputedRef, ToRefs } from 'vue'\n+import type { Role } from '~/lib/enums'\n \n export interface User {\n id: string\n email: string\n firstname: string | null\n lastname: string | null\n- roles: string[]\n+ roles: (Role | string)[]\n }\n \n export interface State {\n", "diff --git a/parent/pom.xml b/parent/pom.xml\nindex d475131..6290e66 100644\n--- a/parent/pom.xml\n+++ b/parent/pom.xml\n@@ -35,6 +35,7 @@\n <version.mockito>1.8.5</version.mockito>\n <version.assertj>3.8.0</version.assertj>\n <version.msgpack>0.8.13</version.msgpack>\n+ <version.jackson>2.9.0</version.jackson>\n <version.jmh>1.11.2</version.jmh>\n <version.sbe>1.5.6</version.sbe>\n <version.slf4j>1.7.23</version.slf4j>\n@@ -64,6 +65,18 @@\n </dependency>\n \n <dependency>\n+ <groupId>com.fasterxml.jackson.core</groupId>\n+ <artifactId>jackson-databind</artifactId>\n+ <version>${version.jackson}</version>\n+ </dependency>\n+\n+ <dependency>\n+ <groupId>com.fasterxml.jackson.dataformat</groupId>\n+ <artifactId>jackson-dataformat-yaml</artifactId>\n+ <version>${version.jackson}</version>\n+ </dependency>\n+\n+ <dependency>\n <groupId>org.msgpack</groupId>\n <artifactId>msgpack-core</artifactId>\n <version>${version.msgpack}</version>\n", "diff --git a/packages/core/src/components/item/item.ios.scss b/packages/core/src/components/item/item.ios.scss\nindex 4de5455..6c4d11a 100644\n--- a/packages/core/src/components/item/item.ios.scss\n+++ b/packages/core/src/components/item/item.ios.scss\n@@ -47,15 +47,6 @@ $item-ios-detail-push-color: $list-ios-border-color !default;\n /// @prop - Icon for the detail arrow\n $item-ios-detail-push-svg: \"<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 20'><path d='M2,20l-2-2l8-8L0,2l2-2l10,10L2,20z' fill='#{$item-ios-detail-push-color}'/></svg>\" !default;\n \n-/// @prop - Background for the divider\n-$item-ios-divider-background: #f7f7f7 !default;\n-\n-/// @prop - Color for the divider\n-$item-ios-divider-color: #222 !default;\n-\n-/// @prop - Padding for the divider\n-$item-ios-divider-padding: 5px 15px !default;\n-\n \n // iOS Item\n // --------------------------------------------------\ndiff --git a/packages/core/src/components/item/item.md.scss b/packages/core/src/components/item/item.md.scss\nindex 1dd1800..3dadbc0 100644\n--- a/packages/core/src/components/item/item.md.scss\n+++ b/packages/core/src/components/item/item.md.scss\n@@ -35,21 +35,6 @@ $item-md-detail-push-color: $list-md-border-color !default;\n /// @prop - Icon for the detail arrow\n $item-md-detail-push-svg: \"<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 20'><path d='M2,20l-2-2l8-8L0,2l2-2l10,10L2,20z' fill='#{$item-md-detail-push-color}'/></svg>\" !default;\n \n-/// 
@prop - Color for the divider\n-$item-md-divider-color: #858585 !default;\n-\n-/// @prop - Background for the divider\n-$item-md-divider-background: #fff !default;\n-\n-/// @prop - Font size for the divider\n-$item-md-divider-font-size: $item-md-body-text-font-size !default;\n-\n-/// @prop - Border bottom for the divider\n-$item-md-divider-border-bottom: 1px solid $list-md-border-color !default;\n-\n-/// @prop - Padding for the divider\n-$item-md-divider-padding: 5px 15px !default;\n-\n \n .item-md {\n @include padding-horizontal($item-md-padding-start, 0);\ndiff --git a/packages/core/src/components/item/item.wp.scss b/packages/core/src/components/item/item.wp.scss\nindex 2c4aae6..07b9266 100644\n--- a/packages/core/src/components/item/item.wp.scss\n+++ b/packages/core/src/components/item/item.wp.scss\n@@ -41,21 +41,6 @@ $item-wp-detail-push-color: $input-wp-border-color !default;\n /// @prop - Icon for the detail arrow\n $item-wp-detail-push-svg: \"<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 20'><path d='M2,20l-2-2l8-8L0,2l2-2l10,10L2,20z' fill='#{$item-wp-detail-push-color}'/></svg>\" !default;\n \n-/// @prop - Color for the divider\n-$item-wp-divider-color: $list-wp-text-color !default;\n-\n-/// @prop - Background for the divider\n-$item-wp-divider-background: #fff !default;\n-\n-/// @prop - Bodrer bottom for the divider\n-$item-wp-divider-border-bottom: 1px solid $list-wp-border-color !default;\n-\n-/// @prop - Font size for the divider\n-$item-wp-divider-font-size: 2rem !default;\n-\n-/// @prop - Padding for the divider\n-$item-wp-divider-padding: 5px 15px !default;\n-\n \n .item-wp {\n @include padding-horizontal($item-wp-padding-start, 0);\n", "diff --git a/src/notebook/epics/kernel-launch.js b/src/notebook/epics/kernel-launch.js\nindex 9075d7c..9f16e67 100644\n--- a/src/notebook/epics/kernel-launch.js\n+++ b/src/notebook/epics/kernel-launch.js\n@@ -113,6 +113,12 @@ export function newKernelObservable(kernelSpec: KernelInfo, cwd: string) {\n observer.error({ type: 'ERROR', payload: error, err: true });\n observer.complete();\n });\n+ spawn.on('exit', () => {\n+ observer.complete();\n+ });\n+ spawn.on('disconnect', () => {\n+ observer.complete();\n+ });\n });\n });\n }\n"]
5
["9117fdedb9b5ce0345c31b3e1fa22ae8554944d4", "176a959eb80d17f9abc5c6b5354e6097be95b42d", "fab09655d5cc30727289cc3f26e5396fce235cd3", "cd7e8c3d3549ea05115b3f02586eeba894d86906", "a280a52c8309465276c3509848ddcddbe19732b6"]
["ci", "feat", "build", "refactor", "fix"]
remove duplicated code,entries updates,fix unstable MessageCorrelationTest,add jackson dependencies for zb-bpmn-model,fix deploy
["diff --git a/packages/core/src/components/action-sheet/action-sheet.tsx b/packages/core/src/components/action-sheet/action-sheet.tsx\nindex 7166508..dad7daf 100644\n--- a/packages/core/src/components/action-sheet/action-sheet.tsx\n+++ b/packages/core/src/components/action-sheet/action-sheet.tsx\n@@ -1,9 +1,9 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, isDef, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -23,15 +23,15 @@ import mdLeaveAnimation from './animations/md.leave';\n })\n export class ActionSheet implements OverlayInterface {\n \n+ private presented = false;\n+\n mode: string;\n color: string;\n-\n- private presented = false;\n- private animation: Animation | null = null;\n+ animation: Animation;\n \n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -178,25 +178,8 @@ export class ActionSheet implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- // Check if prop animate is false or if the config for animate is defined/false\n- if (!this.willAnimate || (isDef(this.config.get('willAnimate')) && this.config.get('willAnimate') === false)) {\n- // if the duration is 0, it won't actually animate I don't think\n- // TODO - validate this\n- this.animation = animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n protected buttonClick(button: ActionSheetButton) {\ndiff --git a/packages/core/src/components/alert/alert.tsx b/packages/core/src/components/alert/alert.tsx\nindex 800b77b..bdf4fc5 100644\n--- a/packages/core/src/components/alert/alert.tsx\n+++ b/packages/core/src/components/alert/alert.tsx\n@@ -1,8 +1,8 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n-import { domControllerAsync, playAnimationAsync, autoFocus } from '../../utils/helpers';\n+import { 
Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { domControllerAsync, autoFocus } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -21,18 +21,19 @@ import mdLeaveAnimation from './animations/md.leave';\n }\n })\n export class Alert implements OverlayInterface {\n- mode: string;\n- color: string;\n \n private presented = false;\n- private animation: Animation | null = null;\n private activeId: string;\n private inputType: string | null = null;\n private hdrId: string;\n \n+ animation: Animation;\n+ mode: string;\n+ color: string;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -264,25 +265,10 @@ export class Alert implements OverlayInterface {\n return values;\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n-\n private renderCheckbox(inputs: AlertInput[]) {\n if (inputs.length === 0) return null;\n \ndiff --git a/packages/core/src/components/loading/loading.tsx b/packages/core/src/components/loading/loading.tsx\nindex f45eaf1..cc4f511 100644\n--- a/packages/core/src/components/loading/loading.tsx\n+++ b/packages/core/src/components/loading/loading.tsx\n@@ -1,13 +1,13 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n import mdEnterAnimation from './animations/md.enter';\n import mdLeaveAnimation from './animations/md.leave';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n @Component({\n tag: 'ion-loading',\n@@ -21,16 +21,17 @@ import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n })\n \n export class Loading implements 
OverlayInterface {\n- color: string;\n- mode: string;\n \n private presented = false;\n- private animation: Animation;\n private durationTimeout: any;\n \n+ animation: Animation;\n+ color: string;\n+ mode: string;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -199,24 +200,8 @@ export class Loading implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- // if the duration is 0, it won't actually animate I don't think\n- // TODO - validate this\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n hostData() {\ndiff --git a/packages/core/src/components/modal/modal.tsx b/packages/core/src/components/modal/modal.tsx\nindex af50d63..2b7510c 100644\n--- a/packages/core/src/components/modal/modal.tsx\n+++ b/packages/core/src/components/modal/modal.tsx\n@@ -1,10 +1,10 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n import { DomFrameworkDelegate } from '../../utils/dom-framework-delegate';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -25,14 +25,16 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Modal implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private usersComponentElement: HTMLElement;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n+\n @Prop() overlayId: number;\n @Prop({ mutable: true }) delegate: FrameworkDelegate;\n \n@@ -208,22 +210,8 @@ export class Modal implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return 
this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n @Method()\ndiff --git a/packages/core/src/components/picker/picker.tsx b/packages/core/src/components/picker/picker.tsx\nindex 13faa3e..d70381e 100644\n--- a/packages/core/src/components/picker/picker.tsx\n+++ b/packages/core/src/components/picker/picker.tsx\n@@ -1,9 +1,9 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop, State } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { getClassMap } from '../../utils/theme';\n-import { OverlayInterface } from '../../utils/overlays';\n+import { OverlayInterface, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -21,16 +21,17 @@ import iosLeaveAnimation from './animations/ios.leave';\n export class Picker implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private durationTimeout: any;\n private mode: string;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n @State() private showSpinner: boolean = null;\n @State() private spinner: string;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -231,22 +232,8 @@ export class Picker implements OverlayInterface {\n return this.columns;\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- })\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n private buttonClick(button: PickerButton) {\ndiff --git a/packages/core/src/components/popover/popover.tsx b/packages/core/src/components/popover/popover.tsx\nindex 65031ff..6a47bf6 100644\n--- a/packages/core/src/components/popover/popover.tsx\n+++ b/packages/core/src/components/popover/popover.tsx\n@@ -1,10 +1,10 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, 
FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n import { DomFrameworkDelegate } from '../../utils/dom-framework-delegate';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -24,12 +24,13 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Popover implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private usersComponentElement: HTMLElement;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop({ mutable: true }) delegate: FrameworkDelegate;\n@@ -224,22 +225,8 @@ export class Popover implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el, this.ev).then((animation) => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- })\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, this.ev);\n }\n \n hostData() {\ndiff --git a/packages/core/src/components/toast/toast.tsx b/packages/core/src/components/toast/toast.tsx\nindex 1afa318..372070a 100644\n--- a/packages/core/src/components/toast/toast.tsx\n+++ b/packages/core/src/components/toast/toast.tsx\n@@ -1,9 +1,9 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, CssClassMap, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, CssClassMap, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface } from '../../utils/overlays';\n+import { OverlayInterface, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -24,14 +24,14 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Toast implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation | null;\n \n @Element() private el: HTMLElement;\n \n 
mode: string;\n color: string;\n+ animation: Animation | null;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -123,6 +123,22 @@ export class Toast implements OverlayInterface {\n */\n @Event() ionToastDidUnload: EventEmitter<ToastEventDetail>;\n \n+ componentDidLoad() {\n+ this.ionToastDidLoad.emit();\n+ }\n+\n+ componentDidUnload() {\n+ this.ionToastDidUnload.emit();\n+ }\n+\n+ @Listen('ionDismiss')\n+ protected onDismiss(ev: UIEvent) {\n+ ev.stopPropagation();\n+ ev.preventDefault();\n+\n+ this.dismiss();\n+ }\n+\n /**\n * Present the toast overlay after it has been created.\n */\n@@ -169,38 +185,8 @@ export class Toast implements OverlayInterface {\n });\n }\n \n- playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el, this.position).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n- }\n-\n- componentDidLoad() {\n- this.ionToastDidLoad.emit();\n- }\n-\n- componentDidUnload() {\n- this.ionToastDidUnload.emit();\n- }\n-\n- @Listen('ionDismiss')\n- protected onDismiss(ev: UIEvent) {\n- ev.stopPropagation();\n- ev.preventDefault();\n-\n- this.dismiss();\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, this.position);\n }\n \n private wrapperClass(): CssClassMap {\ndiff --git a/packages/core/src/utils/overlays.ts b/packages/core/src/utils/overlays.ts\nindex 8926544..634df43 100644\n--- a/packages/core/src/utils/overlays.ts\n+++ b/packages/core/src/utils/overlays.ts\n@@ -1,3 +1,5 @@\n+import { AnimationBuilder, Animation } from \"..\";\n+import { playAnimationAsync } from \"./helpers\";\n \n let lastId = 1;\n \n@@ -56,8 +58,33 @@ export function removeLastOverlay(overlays: OverlayMap) {\n return toRemove ? 
toRemove.dismiss() : Promise.resolve();\n }\n \n+export function overlayAnimation(\n+ overlay: OverlayInterface,\n+ animationBuilder: AnimationBuilder,\n+ animate: boolean,\n+ baseEl: HTMLElement,\n+ opts: any\n+): Promise<void> {\n+ if (overlay.animation) {\n+ overlay.animation.destroy();\n+ overlay.animation = null;\n+ }\n+ return overlay.animationCtrl.create(animationBuilder, baseEl, opts).then(animation => {\n+ overlay.animation = animation;\n+ if (!animate) {\n+ animation.duration(0);\n+ }\n+ return playAnimationAsync(animation);\n+ }).then((animation) => {\n+ animation.destroy();\n+ overlay.animation = null;\n+ });\n+}\n+\n export interface OverlayInterface {\n overlayId: number;\n+ animation: Animation;\n+ animationCtrl: HTMLIonAnimationControllerElement;\n \n present(): Promise<void>;\n dismiss(data?: any, role?: string): Promise<void>;\n", "diff --git a/packages/docz-core/src/DataServer.ts b/packages/docz-core/src/DataServer.ts\nindex 0dad341..d1d95fb 100644\n--- a/packages/docz-core/src/DataServer.ts\n+++ b/packages/docz-core/src/DataServer.ts\n@@ -34,13 +34,13 @@ export class DataServer {\n public async processEntries(): Promise<void> {\n const config = this.config\n const entries = new Entries(config)\n- const map = await entries.getMap()\n const watcher = chokidar.watch(this.config.files, {\n ignored: /(^|[\\/\\\\])\\../,\n })\n \n- const handleConnection = (socket: WS) => {\n- const update = this.updateEntries(socket)\n+ const handleConnection = async (socket: WS) => {\n+ const update = this.updateEntries(entries, socket)\n+ const map = await entries.getMap()\n \n watcher.on('change', async () => update(this.config))\n watcher.on('unlink', async () => update(this.config))\n@@ -51,12 +51,14 @@ export class DataServer {\n })\n \n socket.send(this.entriesData(map))\n+ await Entries.writeImports(map)\n }\n \n this.server.on('connection', handleConnection)\n this.server.on('close', () => watcher.close())\n \n- await Entries.write(config, map)\n+ await Entries.writeGenerated(config)\n+ await Entries.writeImports(await entries.getMap())\n }\n \n public async processThemeConfig(): Promise<void> {\n@@ -88,14 +90,16 @@ export class DataServer {\n return this.dataObj('docz.config', config.themeConfig)\n }\n \n- private updateEntries(socket: WS): (config: Config) => Promise<void> {\n+ private updateEntries(\n+ entries: Entries,\n+ socket: WS\n+ ): (config: Config) => Promise<void> {\n return async config => {\n if (isSocketOpened(socket)) {\n- const newEntries = new Entries(config)\n- const newMap = await newEntries.getMap()\n+ const map = await entries.getMap()\n \n- await Entries.rewrite(newMap)\n- socket.send(this.entriesData(newMap))\n+ await Entries.writeImports(map)\n+ socket.send(this.entriesData(map))\n }\n }\n }\ndiff --git a/packages/docz-core/src/Entries.ts b/packages/docz-core/src/Entries.ts\nindex 76178eb..6e1a370 100644\n--- a/packages/docz-core/src/Entries.ts\n+++ b/packages/docz-core/src/Entries.ts\n@@ -77,14 +77,13 @@ const writeImports = async (entries: EntryMap): Promise<void> => {\n export type EntryMap = Record<string, Entry>\n \n export class Entries {\n- public static async write(config: Config, entries: EntryMap): Promise<void> {\n+ public static async writeGenerated(config: Config): Promise<void> {\n mkd(paths.docz)\n await writeGeneratedFiles(config)\n- await writeImports(entries)\n }\n \n- public static async rewrite(map: EntryMap): Promise<void> {\n- await writeImports(map)\n+ public static async writeImports(entries: EntryMap): Promise<void> {\n+ await 
writeImports(entries)\n }\n \n public all: EntryMap\ndiff --git a/packages/docz-core/templates/app.tpl.js b/packages/docz-core/templates/app.tpl.js\nindex 22ad59b..bbb9081 100644\n--- a/packages/docz-core/templates/app.tpl.js\n+++ b/packages/docz-core/templates/app.tpl.js\n@@ -18,15 +18,6 @@ class App extends React.Component {\n state = {\n config: {},\n entries: {},\n- imports: {},\n- }\n-\n- static getDerivedStateFromProps(nextProps, prevState) {\n- return {\n- config: prevState.config,\n- entries: prevState.entries,\n- imports: nextProps.imports\n- }\n }\n \n async componentDidMount() {\n@@ -44,7 +35,8 @@ class App extends React.Component {\n }\n \n render() {\n- return <Theme {...this.state} wrapper={Wrapper} />\n+ const { imports } = this.props\n+ return <Theme {...this.state} imports={imports} wrapper={Wrapper} />\n }\n }\n \n", "diff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\nindex 0f5fed9..796393c 100644\n--- a/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\n@@ -27,7 +27,6 @@ import static io.zeebe.test.util.MsgPackUtil.asMsgPack;\n import static org.assertj.core.api.Assertions.assertThat;\n import static org.assertj.core.api.Assertions.entry;\n \n-import io.zeebe.UnstableTest;\n import io.zeebe.broker.test.EmbeddedBrokerRule;\n import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.BpmnModelInstance;\n@@ -50,7 +49,6 @@ import org.agrona.DirectBuffer;\n import org.junit.Before;\n import org.junit.Rule;\n import org.junit.Test;\n-import org.junit.experimental.categories.Category;\n import org.junit.rules.RuleChain;\n import org.junit.runner.RunWith;\n import org.junit.runners.Parameterized;\n@@ -165,7 +163,7 @@ public class MessageCorrelationTest {\n \"receive-message\", WorkflowInstanceIntent.ELEMENT_ACTIVATED);\n \n final SubscribedRecord messageSubscription =\n- findMessageSubscription(testClient, MessageSubscriptionIntent.OPENED);\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\n assertThat(messageSubscription.valueType()).isEqualTo(ValueType.MESSAGE_SUBSCRIPTION);\n assertThat(messageSubscription.recordType()).isEqualTo(RecordType.EVENT);\n assertThat(messageSubscription.value())\n@@ -244,7 +242,7 @@ public class MessageCorrelationTest {\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(\"wf\", asMsgPack(\"orderId\", \"order-123\"));\n \n- testClient.receiveFirstWorkflowInstanceEvent(WorkflowInstanceIntent.ELEMENT_ACTIVATED);\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\n \n // when\n testClient.publishMessage(\"order canceled\", \"order-123\", asMsgPack(\"foo\", \"bar\"));\n@@ -308,13 +306,12 @@ public class MessageCorrelationTest {\n }\n \n @Test\n- @Category(UnstableTest.class) // => https://github.com/zeebe-io/zeebe/issues/1234\n public void shouldCorrelateMessageWithZeroTTL() throws Exception {\n // given\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(\"wf\", asMsgPack(\"orderId\", \"order-123\"));\n \n- testClient.receiveElementInState(\"receive-message\", WorkflowInstanceIntent.ELEMENT_ACTIVATED);\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\n \n // when\n testClient.publishMessage(\"order canceled\", \"order-123\", asMsgPack(\"foo\", \"bar\"), 0);\n@@ -499,10 +496,9 @@ public class MessageCorrelationTest {\n 
.containsEntry(\"activityInstanceKey\", catchEventEntered.key());\n }\n \n- private SubscribedRecord findMessageSubscription(\n- final TestPartitionClient client, final MessageSubscriptionIntent intent)\n+ private SubscribedRecord findMessageSubscription(final MessageSubscriptionIntent intent)\n throws AssertionError {\n- return client\n+ return testClient\n .receiveEvents()\n .filter(intent(intent))\n .findFirst()\n", "diff --git a/parent/pom.xml b/parent/pom.xml\nindex d475131..6290e66 100644\n--- a/parent/pom.xml\n+++ b/parent/pom.xml\n@@ -35,6 +35,7 @@\n <version.mockito>1.8.5</version.mockito>\n <version.assertj>3.8.0</version.assertj>\n <version.msgpack>0.8.13</version.msgpack>\n+ <version.jackson>2.9.0</version.jackson>\n <version.jmh>1.11.2</version.jmh>\n <version.sbe>1.5.6</version.sbe>\n <version.slf4j>1.7.23</version.slf4j>\n@@ -64,6 +65,18 @@\n </dependency>\n \n <dependency>\n+ <groupId>com.fasterxml.jackson.core</groupId>\n+ <artifactId>jackson-databind</artifactId>\n+ <version>${version.jackson}</version>\n+ </dependency>\n+\n+ <dependency>\n+ <groupId>com.fasterxml.jackson.dataformat</groupId>\n+ <artifactId>jackson-dataformat-yaml</artifactId>\n+ <version>${version.jackson}</version>\n+ </dependency>\n+\n+ <dependency>\n <groupId>org.msgpack</groupId>\n <artifactId>msgpack-core</artifactId>\n <version>${version.msgpack}</version>\n", "diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml\nindex 3830f4c..3b14ee5 100644\n--- a/.github/workflows/deploy.yaml\n+++ b/.github/workflows/deploy.yaml\n@@ -67,7 +67,7 @@ jobs:\n run: aws s3 cp .next/static s3://cdn.rs.school/_next/static/ --recursive --cache-control \"public,max-age=15552000,immutable\"\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -117,7 +117,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -167,7 +167,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n"]
5
["9e3f295bbfd4098ffda1ae6656699f60b86c1f92", "7147ac1f43a3ca454c79a6709dda2c35162ec88c", "98bed2a8137930149559bc1cae9bd34a1a75e556", "fab09655d5cc30727289cc3f26e5396fce235cd3", "7785be09053049b30cf41b420c59f051cd0129fc"]
["refactor", "fix", "test", "build", "ci"]
update version (nightly.0),handle default_branch_monthly_cost having no cost Closes https://github.com/infracost/infracost-gh-action/issues/17,add flag to wait for workflow instance result - with the flag withResult the create instance command will wait for the workflow to complete - optionally a list of variable names can be specified to limit the fetched variables,update get-started,change tests to depend on BrokerContext
["diff --git a/Cargo.lock b/Cargo.lock\nindex e6f659c..cf93556 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -126,7 +126,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -134,7 +134,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex c58299b..6e51b6e 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full-repl = [\"erg_common/full-repl\"]\n full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.11\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.11\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.11\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.23\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.24-nightly.0\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 5f005a1..e1a9964 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n", "diff --git a/scripts/ci/diff.sh b/scripts/ci/diff.sh\nindex 7472273..fa48e4b 100755\n--- a/scripts/ci/diff.sh\n+++ b/scripts/ci/diff.sh\n@@ -112,7 +112,12 @@ echo \"$default_branch_output\" > default_branch_infracost.txt\n default_branch_monthly_cost=$(cat default_branch_infracost.txt | awk '/OVERALL TOTAL/ { gsub(\",\",\"\"); printf(\"%.2f\",$NF) }')\n echo \"::set-output name=default_branch_monthly_cost::$default_branch_monthly_cost\"\n \n-percent_diff=$(echo \"scale=4; $current_branch_monthly_cost / $default_branch_monthly_cost * 100 - 100\" | bc)\n+if [ $(echo \"$default_branch_monthly_cost > 0\" | bc -l) = 1 ]; then\n+ percent_diff=$(echo \"scale=4; $current_branch_monthly_cost / $default_branch_monthly_cost * 100 - 100\" | bc)\n+else\n+ echo \"Default branch has no cost, setting percent_diff=100 to force a comment\"\n+ percent_diff=100\n+fi\n absolute_percent_diff=$(echo $percent_diff | tr -d -)\n \n if [ $(echo 
\"$absolute_percent_diff > $percentage_threshold\" | bc -l) = 1 ]; then\n", "diff --git a/clients/zbctl/cmd/createInstance.go b/clients/zbctl/cmd/createInstance.go\nindex 016f115..85ac0be 100644\n--- a/clients/zbctl/cmd/createInstance.go\n+++ b/clients/zbctl/cmd/createInstance.go\n@@ -15,13 +15,15 @@ package cmd\n \n import (\n \t\"github.com/zeebe-io/zeebe/clients/go/commands\"\n+\t\"strings\"\n \n \t\"github.com/spf13/cobra\"\n )\n \n var (\n-\tcreateInstanceVersionFlag int32\n-\tcreateInstanceVariablesFlag string\n+\tcreateInstanceVersionFlag int32\n+\tcreateInstanceVariablesFlag string\n+\tcreateInstanceWithResultFlag []string\n )\n \n var createInstanceCmd = &cobra.Command{\n@@ -39,12 +41,29 @@ var createInstanceCmd = &cobra.Command{\n \t\t\treturn err\n \t\t}\n \n-\t\tresponse, err := zbCmd.Send()\n-\t\tif err != nil {\n-\t\t\treturn err\n-\t\t}\n+\t\tif createInstanceWithResultFlag == nil {\n+\t\t\tresponse, err := zbCmd.Send()\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\n+\t\t\treturn printJson(response)\n+\t\t} else {\n+\t\t\tvariableNames := []string{}\n+\t\t\tfor _, variableName := range createInstanceWithResultFlag {\n+\t\t\t\ttrimedVariableName := strings.TrimSpace(variableName)\n+\t\t\t\tif trimedVariableName != \"\" {\n+\t\t\t\t\tvariableNames = append(variableNames, trimedVariableName)\n+\t\t\t\t}\n+\t\t\t}\n+\t\t\tresponse, err := zbCmd.WithResult().FetchVariables(variableNames...).Send()\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\n+\t\t\treturn printJson(response)\n \n-\t\treturn printJson(response)\n+\t\t}\n \t},\n }\n \n@@ -58,4 +77,11 @@ func init() {\n \tcreateInstanceCmd.\n \t\tFlags().\n \t\tInt32Var(&createInstanceVersionFlag, \"version\", commands.LatestVersion, \"Specify version of workflow which should be executed.\")\n+\n+\tcreateInstanceCmd.\n+\t\tFlags().\n+\t\tStringSliceVar(&createInstanceWithResultFlag, \"withResult\", nil, \"Specify to await result of workflow, optional a list of variable names can be provided to limit the returned variables\")\n+\n+\t// hack to use --withResult without values\n+\tcreateInstanceCmd.Flag(\"withResult\").NoOptDefVal = \" \"\n }\n", "diff --git a/docs/src/go-client/get-started.md b/docs/src/go-client/get-started.md\nindex 4f4405f..a792e0e 100755\n--- a/docs/src/go-client/get-started.md\n+++ b/docs/src/go-client/get-started.md\n@@ -199,14 +199,12 @@ workflowKey:1 bpmnProcessId:\"order-process\" version:1 workflowInstanceKey:6\n \n You did it! 
You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n \n ## Work on a task\n@@ -322,7 +320,7 @@ it encounters a problem while processing the job.\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/go-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n When you run the above example you should see similar output:\n \ndiff --git a/docs/src/go-client/java-get-started-monitor-1.gif b/docs/src/go-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/go-client/java-get-started-monitor-2.gif b/docs/src/go-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/go-client/zeebe-monitor-1.png b/docs/src/go-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-2.png b/docs/src/go-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 6687bb0..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-3.png b/docs/src/go-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-3.png and /dev/null differ\ndiff --git a/docs/src/introduction/quickstart.md b/docs/src/introduction/quickstart.md\nindex 70abacf..68be28b 100644\n--- a/docs/src/introduction/quickstart.md\n+++ b/docs/src/introduction/quickstart.md\n@@ -215,7 +215,7 @@ and completed by a [job worker](/basics/job-workers.html). A job worker is a\n long living process which repeatedly tries to activate jobs for a given job\n type and completes them after executing its business logic. The `zbctl` also\n provides a command to spawn simple job workers using an external command or\n-script. The job worker will receive for every job the payload as JSON object on\n+script. 
The job worker will receive for every job the workflow instance variables as JSON object on\n `stdin` and has to return its result also as JSON object on `stdout` if it\n handled the job successfully.\n \ndiff --git a/docs/src/java-client/get-started.md b/docs/src/java-client/get-started.md\nindex 54d2208..afc1fd4 100755\n--- a/docs/src/java-client/get-started.md\n+++ b/docs/src/java-client/get-started.md\n@@ -21,9 +21,9 @@ You will be guided through the following steps:\n * [Zeebe Modeler](https://github.com/zeebe-io/zeebe-modeler/releases)\n * [Zeebe Monitor](https://github.com/zeebe-io/zeebe-simple-monitor/releases)\n \n-Before you begin to setup your project please start the broker, i.e. by running the start up script \n-`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the \n-address `localhost:26500`, which is used as contact point in this guide. In case your broker is \n+Before you begin to setup your project please start the broker, i.e. by running the start up script\n+`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the\n+address `localhost:26500`, which is used as contact point in this guide. In case your broker is\n available under another address please adjust the broker contact point when building the client.\n \n ## Set up a project\n@@ -182,14 +182,12 @@ Workflow instance created. Key: 6\n \n You did it! You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n ## Work on a job\n \n@@ -205,12 +203,9 @@ Insert a few service tasks between the start and the end event.\n You need to set the type of each task, which identifies the nature of the work to be performed.\n Set the type of the first task to 'payment-service'.\n \n-Optionally, you can define parameters of the task by adding headers.\n-Add the header `method = VISA` to the first task.\n-\n Save the BPMN diagram and switch back to the main class.\n \n-Add the following lines to create a [job worker][] for the first jobs type:\n+Add the following lines to create a job worker for the first jobs type:\n \n ```java\n package io.zeebe;\n@@ -227,10 +222,7 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Collect money\");\n \n // ...\n \n@@ -252,40 +244,29 @@ public class Application\n Run the program and verify that the job is processed. 
You should see the output:\n \n ```\n-Collect money using payment method: VISA\n+Collect money\n ```\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/java-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n ## Work with data\n \n-Usually, a workflow is more than just tasks, there is also data flow.\n-The tasks need data as input and in order to produce data.\n+Usually, a workflow is more than just tasks, there is also a data flow. The worker gets the data from the workflow instance to do its work and send the result back to the workflow instance.\n \n-In Zeebe, the data is represented as a JSON document.\n-When you create a workflow instance, then you can pass the data as payload.\n-Within the workflow, you can use input and output mappings on tasks to control the data flow.\n+In Zeebe, the data is stored as key-value-pairs in form of variables. Variables can be set when the workflow instance is created. Within the workflow, variables can be read and modified by workers.\n \n-In our example, we want to create a workflow instance with the following data:\n+In our example, we want to create a workflow instance with the following variables:\n \n ```json\n-{\n- \"orderId\": 31243,\n- \"orderItems\": [435, 182, 376]\n-}\n+\"orderId\": 31243\n+\"orderItems\": [435, 182, 376]\n ```\n \n-The first task should take `orderId` as input and return `totalPrice` as result.\n-\n-Open the BPMN diagram and switch to the input-output-mappings of the first task.\n-Add the input mapping `$.orderId : $.orderId` and the output mapping `$.totalPrice : $.totalPrice`.\n+The first task should read `orderId` as input and return `totalPrice` as result.\n \n-Save the BPMN diagram and go back to the main class.\n-\n-Modify the create command and pass the data as variables.\n-Also, modify the job worker to read the jobs payload and complete the job with payload.\n+Modify the workflow instance create command and pass the data as variables. Also, modify the job worker to read the job variables and complete the job with a result.\n \n ```java\n package io.zeebe;\n@@ -313,23 +294,22 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- final Map<String, Object> payload = job.getPayloadAsMap();\n+ final Map<String, Object> variables = job.getVariablesAsMap();\n \n- System.out.println(\"Process order: \" + payload.get(\"orderId\"));\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Process order: \" + variables.get(\"orderId\"));\n+ System.out.println(\"Collect money\");\n \n // ...\n \n- payload.put(\"totalPrice\", 46.50);\n+ final Map<String, Object> result = new HashMap<>();\n+ result.put(\"totalPrice\", 46.50);\n \n jobClient.newCompleteCommand(job.getKey())\n- .payload(payload)\n+ .variables(result)\n .send()\n .join();\n })\n+ .fetchVariables(\"orderId\")\n .open();\n \n // ...\n@@ -337,16 +317,16 @@ public class Application\n }\n ```\n \n-Run the program and verify that the payload is mapped into the job. You should see the output:\n+Run the program and verify that the variable is read. 
You should see the output:\n \n ```\n-Process order: {\"orderId\":31243}\n-Collect money using payment method: VISA\n+Process order: 31243\n+Collect money\n ```\n \n-When we have a look at the Zeebe Monitor, then we can see how the payload is modified after the activity:\n+When we have a look at the Zeebe Monitor, then we can see that the variable `totalPrice` is set:\n \n-![zeebe-monitor-step-3](/java-client/zeebe-monitor-3.png)\n+![zeebe-monitor-step-3](/java-client/java-get-started-monitor-3.gif)\n \n ## What's next?\n \ndiff --git a/docs/src/java-client/java-get-started-monitor-1.gif b/docs/src/java-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-2.gif b/docs/src/java-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-3.gif b/docs/src/java-client/java-get-started-monitor-3.gif\nnew file mode 100644\nindex 0000000..1f6cb56\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-3.gif differ\ndiff --git a/docs/src/java-client/zeebe-monitor-1.png b/docs/src/java-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-2.png b/docs/src/java-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 6687bb0..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-3.png b/docs/src/java-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-3.png and /dev/null differ\n", "diff --git a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\nindex fe4e42d..37c7066 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n@@ -7,20 +7,14 @@\n */\n package io.camunda.zeebe.broker;\n \n-import io.atomix.cluster.AtomixCluster;\n import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupContextImpl;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupProcess;\n-import io.camunda.zeebe.broker.clustering.ClusterServices;\n import io.camunda.zeebe.broker.exporter.repo.ExporterLoadException;\n import io.camunda.zeebe.broker.exporter.repo.ExporterRepository;\n-import io.camunda.zeebe.broker.partitioning.PartitionManager;\n-import io.camunda.zeebe.broker.system.EmbeddedGatewayService;\n import io.camunda.zeebe.broker.system.SystemContext;\n import io.camunda.zeebe.broker.system.configuration.BrokerCfg;\n-import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.broker.system.monitoring.BrokerHealthCheckService;\n-import io.camunda.zeebe.broker.system.monitoring.DiskSpaceUsageMonitor;\n import io.camunda.zeebe.protocol.impl.encoding.BrokerInfo;\n import io.camunda.zeebe.util.LogUtil;\n import io.camunda.zeebe.util.VersionUtil;\n@@ -184,35 +178,15 @@ public final class Broker implements AutoCloseable {\n }\n \n // only used for tests\n- public EmbeddedGatewayService getEmbeddedGatewayService() {\n- return 
brokerContext.getEmbeddedGatewayService();\n- }\n-\n- public AtomixCluster getAtomixCluster() {\n- return brokerContext.getAtomixCluster();\n- }\n-\n- public ClusterServices getClusterServices() {\n- return brokerContext.getClusterServices();\n- }\n-\n- public DiskSpaceUsageMonitor getDiskSpaceUsageMonitor() {\n- return brokerContext.getDiskSpaceUsageMonitor();\n- }\n-\n- public BrokerAdminService getBrokerAdminService() {\n- return brokerContext.getBrokerAdminService();\n+ public BrokerContext getBrokerContext() {\n+ return brokerContext;\n }\n \n+ // only used for tests\n public SystemContext getSystemContext() {\n return systemContext;\n }\n \n- public PartitionManager getPartitionManager() {\n- return brokerContext.getPartitionManager();\n- }\n- // only used for tests\n-\n /**\n * Temporary helper object. This object is needed during the transition of broker startup/shutdown\n * steps to the new concept. Afterwards, the expectation is that this object will merge with the\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\nindex bda5170..1accbc1 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n@@ -45,11 +45,12 @@ public class BrokerSnapshotTest {\n (RaftPartition)\n brokerRule\n .getBroker()\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(PartitionId.from(PartitionManagerImpl.GROUP_NAME, PARTITION_ID));\n journalReader = raftPartition.getServer().openReader();\n- brokerAdminService = brokerRule.getBroker().getBrokerAdminService();\n+ brokerAdminService = brokerRule.getBroker().getBrokerContext().getBrokerAdminService();\n \n final String contactPoint = NetUtil.toSocketAddressString(brokerRule.getGatewayAddress());\n final ZeebeClientBuilder zeebeClientBuilder =\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\nindex e98e7d2..a831bfe 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n@@ -173,11 +173,11 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n }\n \n public ClusterServices getClusterServices() {\n- return broker.getClusterServices();\n+ return broker.getBrokerContext().getClusterServices();\n }\n \n public AtomixCluster getAtomixCluster() {\n- return broker.getAtomixCluster();\n+ return broker.getBrokerContext().getAtomixCluster();\n }\n \n public InetSocketAddress getGatewayAddress() {\n@@ -245,7 +245,8 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient brokerClient = embeddedGatewayService.get().getBrokerClient();\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\nindex 890b596..8561cf1 100644\n--- 
a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n@@ -29,6 +29,7 @@ import io.atomix.utils.net.Address;\n import io.camunda.zeebe.broker.Broker;\n import io.camunda.zeebe.broker.PartitionListener;\n import io.camunda.zeebe.broker.SpringBrokerBridge;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.exporter.stream.ExporterDirectorContext;\n import io.camunda.zeebe.broker.partitioning.PartitionManagerImpl;\n import io.camunda.zeebe.broker.system.SystemContext;\n@@ -602,11 +603,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void stepDown(final Broker broker, final int partitionId) {\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == partitionId)\n .map(RaftPartition.class::cast)\n@@ -617,14 +618,14 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void disconnect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).stop().join();\n ((NettyMessagingService) atomix.getMessagingService()).stop().join();\n }\n \n public void connect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).start().join();\n ((NettyMessagingService) atomix.getMessagingService()).start().join();\n@@ -666,11 +667,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n final var broker = brokers.get(expectedLeader);\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == START_PARTITION_ID)\n .map(RaftPartition.class::cast)\n@@ -775,14 +776,15 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void takeSnapshot(final Broker broker) {\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n }\n \n public void triggerAndWaitForSnapshots() {\n // Ensure that the exporter positions are distributed to the followers\n getClock().addTime(ExporterDirectorContext.DEFAULT_DISTRIBUTION_INTERVAL);\n getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::takeSnapshot);\n \n getBrokers()\n@@ -794,7 +796,7 @@ public final class ClusteringRule extends ExternalResource {\n .until(\n () -> {\n // 
Trigger snapshot again in case snapshot is not already taken\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n return getSnapshot(broker);\n },\n Optional::isPresent));\n@@ -831,7 +833,7 @@ public final class ClusteringRule extends ExternalResource {\n \n private Optional<SnapshotId> getSnapshot(final Broker broker, final int partitionId) {\n \n- final var partitions = broker.getBrokerAdminService().getPartitionStatus();\n+ final var partitions = broker.getBrokerContext().getBrokerAdminService().getPartitionStatus();\n final var partitionStatus = partitions.get(partitionId);\n \n return Optional.ofNullable(partitionStatus)\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\nindex f07961c..d46636b 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n@@ -88,6 +88,7 @@ public class DiskSpaceMonitoringFailOverTest {\n () ->\n clusteringRule\n .getBroker(newLeaderId)\n+ .getBrokerContext()\n .getBrokerAdminService()\n .getPartitionStatus()\n .get(1)\n@@ -96,7 +97,7 @@ public class DiskSpaceMonitoringFailOverTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\nindex 0a02a27..6e93cf9 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n@@ -165,7 +165,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -188,7 +188,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\nindex 9cef5a0..a487729 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n+++ 
b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n@@ -192,7 +192,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -215,7 +216,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\nindex 2d1e4f0..58f6f16 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n@@ -48,6 +48,7 @@ public class HealthMonitoringTest {\n final var raftPartition =\n (RaftPartition)\n leader\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\nindex 468f83c..7ff03be 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n@@ -11,6 +11,7 @@ import static org.assertj.core.api.Assertions.assertThat;\n \n import io.atomix.raft.RaftServer.Role;\n import io.camunda.zeebe.broker.Broker;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor.Phase;\n import io.camunda.zeebe.it.clustering.ClusteringRule;\n@@ -48,7 +49,7 @@ public class BrokerAdminServiceClusterTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -61,7 +62,8 @@ public class BrokerAdminServiceClusterTest {\n // when\n final var followerStatus =\n followers.stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .map(BrokerAdminService::getPartitionStatus)\n .map(status -> status.get(1));\n \n@@ -94,7 +96,8 @@ public class BrokerAdminServiceClusterTest {\n \n // then\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(this::assertThatStatusContainsProcessedPositionInSnapshot);\n }\n \n@@ -102,7 +105,8 @@ public class 
BrokerAdminServiceClusterTest {\n public void shouldPauseAfterLeaderChange() {\n // given\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::pauseStreamProcessing);\n \n // when\n@@ -113,6 +117,7 @@ public class BrokerAdminServiceClusterTest {\n final var newLeaderAdminService =\n clusteringRule\n .getBroker(clusteringRule.getLeaderForPartition(1).getNodeId())\n+ .getBrokerContext()\n .getBrokerAdminService();\n assertStreamProcessorPhase(newLeaderAdminService, Phase.PAUSED);\n }\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\nindex 5160b50..2185329 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n@@ -41,7 +41,7 @@ public class BrokerAdminServiceTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -144,7 +144,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PAUSED);\n }\n \n@@ -161,7 +161,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PROCESSING);\n }\n \n@@ -176,7 +176,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.PAUSED);\n }\n \n@@ -193,7 +193,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.EXPORTING);\n }\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\nindex d6c8ab3..4582ad2 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n@@ -30,7 +30,7 @@ public class BrokerAdminServiceWithOutExporterTest {\n // given\n final var leader =\n clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- final var leaderAdminService = 
leader.getBrokerAdminService();\n+ final var leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n // when there are no exporters configured\n // then\n final var partitionStatus = leaderAdminService.getPartitionStatus().get(1);\ndiff --git a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\nindex 36bc0bf..d332201 100644\n--- a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n+++ b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n@@ -240,7 +240,8 @@ public class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient brokerClient = embeddedGatewayService.get().getBrokerClient();\n \n"]
5
["92e940efeee199b1e0bbbc3c9eea7f3dc8221619", "9474f58b44a35321e9157ca9890c589a7b3729b2", "f3107f1a8eb124b55e775d23416540f49204a19e", "cf6d526123abab2689b24a06aaf03d8e4d6ddff4", "e52a6201093f273add4903dd5f4e55a63539386d"]
["build", "fix", "feat", "docs", "refactor"]
add prewatch script to core,serialize access to StreamObserver,fix node test cases running into an infinite loop,export order,rename top-level to connection
["diff --git a/.gitignore b/.gitignore\nindex 3445558..72257cb 100644\n--- a/.gitignore\n+++ b/.gitignore\n@@ -1,4 +1,5 @@\n *.o\n+.env\n settheory\n constraint\n Main\ndiff --git a/packages/core/package.json b/packages/core/package.json\nindex 9ba8b93..79bd532 100644\n--- a/packages/core/package.json\n+++ b/packages/core/package.json\n@@ -14,6 +14,7 @@\n \"build:parsers\": \"nearleyc src/parser/Domain.ne > src/parser/DomainParser.ts && nearleyc src/parser/Substance.ne > src/parser/SubstanceParser.ts && nearleyc src/parser/Style.ne > src/parser/StyleParser.ts\",\n \"prebuild\": \"yarn build:parsers\",\n \"prestart\": \"yarn build:parsers\",\n+ \"prewatch\": \"yarn build:parsers\",\n \"test\": \"jest --watchAll=false\",\n \"test:watch\": \"jest --watchAll\",\n \"build\": \"rollup -c\",\n", "diff --git a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\nindex ae2b1c0..8ed64e5 100644\n--- a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\n+++ b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\n@@ -22,6 +22,7 @@ import io.camunda.zeebe.transport.stream.api.ClientStreamer;\n import io.camunda.zeebe.util.VisibleForTesting;\n import io.grpc.Status;\n import io.grpc.StatusRuntimeException;\n+import io.grpc.internal.SerializingExecutor;\n import io.grpc.stub.ServerCallStreamObserver;\n import io.grpc.stub.StreamObserver;\n import java.util.concurrent.CompletableFuture;\n@@ -83,12 +84,12 @@ public class ClientStreamAdapter {\n @VisibleForTesting(\"Allow unit testing behavior job handling behavior\")\n static final class ClientStreamConsumerImpl implements ClientStreamConsumer {\n private final StreamObserver<ActivatedJob> responseObserver;\n- private final Executor executor;\n+ private final SerializingExecutor executor;\n \n public ClientStreamConsumerImpl(\n final StreamObserver<ActivatedJob> responseObserver, final Executor executor) {\n this.responseObserver = responseObserver;\n- this.executor = executor;\n+ this.executor = new SerializingExecutor(executor);\n }\n \n @Override\n", "diff --git a/packages/designer/tests/document/node/node.test.ts b/packages/designer/tests/document/node/node.test.ts\nindex dd20bd3..113360d 100644\n--- a/packages/designer/tests/document/node/node.test.ts\n+++ b/packages/designer/tests/document/node/node.test.ts\n@@ -26,7 +26,7 @@ import rootHeaderMetadata from '../../fixtures/component-metadata/root-header';\n import rootContentMetadata from '../../fixtures/component-metadata/root-content';\n import rootFooterMetadata from '../../fixtures/component-metadata/root-footer';\n \n-describe.skip('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n+describe('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n let editor: Editor;\n let designer: Designer;\n let project: Project;\n@@ -474,15 +474,16 @@ describe.skip('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n it('didDropIn / didDropOut', () => {\n const form = doc.getNode('node_k1ow3cbo');\n designer.createComponentMeta(divMetadata);\n+ designer.createComponentMeta(formMetadata);\n const callbacks = form.componentMeta.getMetadata().configure.advanced?.callbacks;\n const fn1 = callbacks.onNodeAdd = jest.fn();\n const fn2 = callbacks.onNodeRemove = jest.fn();\n const textField = doc.getNode('node_k1ow3cc9');\n form.didDropIn(textField);\n- expect(fn1).toHaveBeenCalledWith(textField, form);\n+ expect(fn1).toHaveBeenCalledWith(textField.internalToShellNode(), 
form.internalToShellNode());\n \n form.didDropOut(textField);\n- expect(fn2).toHaveBeenCalledWith(textField, form);\n+ expect(fn2).toHaveBeenCalledWith(textField.internalToShellNode(), form.internalToShellNode());\n });\n \n it('hover', () => {\n", "diff --git a/src/Object/_api.ts b/src/Object/_api.ts\nindex a1e5c8c..70e9fea 100644\n--- a/src/Object/_api.ts\n+++ b/src/Object/_api.ts\n@@ -38,9 +38,9 @@ export {Optional} from './Optional'\n export {OptionalKeys} from './OptionalKeys'\n export {Overwrite} from './Overwrite'\n export {Partial} from './Partial'\n+export {Path} from './Path'\n export {Paths} from './Paths'\n export {PathValid} from './PathValid'\n-export {Path} from './Path'\n export {Pick} from './Pick'\n export {Readonly} from './Readonly'\n export {ReadonlyKeys} from './ReadonlyKeys'\n", "diff --git a/docs/_quarto.yml b/docs/_quarto.yml\nindex 4e086c7..69471ee 100644\n--- a/docs/_quarto.yml\n+++ b/docs/_quarto.yml\n@@ -140,7 +140,7 @@ website:\n contents:\n - section: Expression API\n contents:\n- - reference/top_level.qmd\n+ - reference/connection.qmd\n - reference/expression-tables.qmd\n - reference/selectors.qmd\n - reference/expression-generic.qmd\n@@ -191,10 +191,10 @@ quartodoc:\n contents:\n - kind: page\n package: ibis\n- path: top_level\n+ path: connection\n summary:\n- name: Top-level APIs\n- desc: Methods and objects available directly on the `ibis` module.\n+ name: Connection API\n+ desc: Create and manage backend connections.\n contents:\n - name: connect\n package: ibis.backends.base\n"]
5
["aa0152baa4376b1087c86499a7c289b668d5ad55", "22044d58302513f5cf22b06151c4a367bbb88f6e", "d2c3f0ba6f85b659b76636a91ea9ab2b5a95a720", "879edb6ed90f88b9ae6a3c2e8878ae1be48e0c88", "9b9cd037645ec716a45b70137f8d2f01ec9ab90c"]
["build", "fix", "test", "refactor", "docs"]
correctly read new last flushed index,add prewatch script to core,cue linter: include all CUE files Signed-off-by: Andrea Luzzardi <[email protected]>,use new freespace config for disk space recovery test,expose the means by which we process each reward cycle's affirmation maps at reward cycle boundaries
["diff --git a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\nindex 69b06b6..a4fcb77 100644\n--- a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\n+++ b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\n@@ -112,7 +112,7 @@ public class PartitionRestoreService {\n SegmentedJournal.builder()\n .withDirectory(dataDirectory.toFile())\n .withName(partition.name())\n- .withLastWrittenIndex(-1)\n+ .withLastFlushedIndex(-1)\n .build()) {\n \n resetJournal(checkpointPosition, journal);\n", "diff --git a/.gitignore b/.gitignore\nindex 3445558..72257cb 100644\n--- a/.gitignore\n+++ b/.gitignore\n@@ -1,4 +1,5 @@\n *.o\n+.env\n settheory\n constraint\n Main\ndiff --git a/packages/core/package.json b/packages/core/package.json\nindex 9ba8b93..79bd532 100644\n--- a/packages/core/package.json\n+++ b/packages/core/package.json\n@@ -14,6 +14,7 @@\n \"build:parsers\": \"nearleyc src/parser/Domain.ne > src/parser/DomainParser.ts && nearleyc src/parser/Substance.ne > src/parser/SubstanceParser.ts && nearleyc src/parser/Style.ne > src/parser/StyleParser.ts\",\n \"prebuild\": \"yarn build:parsers\",\n \"prestart\": \"yarn build:parsers\",\n+ \"prewatch\": \"yarn build:parsers\",\n \"test\": \"jest --watchAll=false\",\n \"test:watch\": \"jest --watchAll\",\n \"build\": \"rollup -c\",\n", "diff --git a/ci/cue/lint.cue b/ci/cue/lint.cue\nindex cdda698..6aac265 100644\n--- a/ci/cue/lint.cue\n+++ b/ci/cue/lint.cue\n@@ -39,7 +39,7 @@ import (\n \t\t\t// CACHE: copy only *.cue files\n \t\t\tdocker.#Copy & {\n \t\t\t\tcontents: source\n-\t\t\t\tinclude: [\"*.cue\"]\n+\t\t\t\tinclude: [\"*.cue\", \"**/*.cue\"]\n \t\t\t\tdest: \"/cue\"\n \t\t\t},\n \n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\nindex 0854323..bfc7b7e 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n@@ -47,7 +47,8 @@ final class DiskSpaceRecoveryIT {\n .withZeebeData(volume)\n .withEnv(\"ZEEBE_BROKER_DATA_LOGSEGMENTSIZE\", \"1MB\")\n .withEnv(\"ZEEBE_BROKER_NETWORK_MAXMESSAGESIZE\", \"1MB\")\n- .withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.5\");\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"10MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"1MB\");\n \n private ZeebeClient client;\n \n@@ -127,7 +128,9 @@ final class DiskSpaceRecoveryIT {\n ContainerEngine.builder()\n .withDebugReceiverPort(SocketUtil.getNextAddress().getPort())\n .withContainer(\n- container.withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.0001\"))\n+ container\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"16MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"10MB\"))\n .build();\n \n @BeforeEach\n", "diff --git a/src/burnchains/burnchain.rs b/src/burnchains/burnchain.rs\nindex 92105d6..60c608a 100644\n--- a/src/burnchains/burnchain.rs\n+++ b/src/burnchains/burnchain.rs\n@@ -851,8 +851,26 @@ impl Burnchain {\n );\n \n burnchain_db.store_new_burnchain_block(burnchain, indexer, &block)?;\n- let block_height = block.block_height();\n+ Burnchain::process_affirmation_maps(\n+ burnchain,\n+ burnchain_db,\n+ indexer,\n+ block.block_height(),\n+ 
)?;\n+\n+ let header = block.header();\n+ Ok(header)\n+ }\n \n+ /// Update the affirmation maps for the previous reward cycle's commits.\n+ /// This is a no-op unless the given burnchain block height falls on a reward cycle boundary. In that\n+ /// case, the previous reward cycle's block commits' affirmation maps are all re-calculated.\n+ pub fn process_affirmation_maps<B: BurnchainHeaderReader>(\n+ burnchain: &Burnchain,\n+ burnchain_db: &mut BurnchainDB,\n+ indexer: &B,\n+ block_height: u64,\n+ ) -> Result<(), burnchain_error> {\n let this_reward_cycle = burnchain\n .block_height_to_reward_cycle(block_height)\n .unwrap_or(0);\n@@ -872,10 +890,7 @@ impl Burnchain {\n );\n update_pox_affirmation_maps(burnchain_db, indexer, prev_reward_cycle, burnchain)?;\n }\n-\n- let header = block.header();\n-\n- Ok(header)\n+ Ok(())\n }\n \n /// Hand off the block to the ChainsCoordinator _and_ process the sortition\n"]
5
["5ffc5794808647de14f945141692be26ad143006", "aa0152baa4376b1087c86499a7c289b668d5ad55", "4c44543a3d9eea37e90a2316717feb01c0e0d83a", "672cd2b9775fb6dac2d522cb3f4469db47c0556b", "d7972da833257c073403dec3c2ac3a7f297e328a"]
["fix", "build", "ci", "test", "refactor"]
do not run tests and build when no changes reported by lerna,add activatedElementInstanceKeys to modification record,Handle different events.,coordinator accepts a request transformer instead of a list of operations The request transformer can generate the operations from the current topology. This helps to - ensure that the operations are generated based on the latest topology. When concurrent changes happen, the coordinator can detect them. Previously this was unclear because, by the time the apply operations were handled, the cluster topology might have changed. - return the simulated final topology as part of the result,fix `get-deploy-tags.sh`
["diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml\nindex ca46ca4..d69e581 100644\n--- a/.github/workflows/tests.yml\n+++ b/.github/workflows/tests.yml\n@@ -42,23 +42,25 @@ jobs:\n - name: Set CC Required env vars\n run: export GIT_BRANCH=$GITHUB_HEAD_REF && export GIT_COMMIT_SHA=$(git rev-parse origin/$GITHUB_HEAD_REF)\n \n- - name: Build\n- run: yarn build\n-\n - name: Lint\n run: yarn lint\n \n+ - name: Check for changes\n+ id: changed_packages\n+ run: |\n+ echo \"::set-output name=changed_packages::$(node ./node_modules/.bin/lerna changed -p | wc -l)\"\n+\n+ - name: Build\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n+ run: yarn build\n+\n - name: Test\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n run: |\n yarn run-rs-in-background\n yarn coverage > COVERAGE_RESULT\n echo \"$(cat COVERAGE_RESULT)\"\n \n- - name: Check for changes\n- id: changed_packages\n- run: |\n- echo \"::set-output name=changed_packages::$(node ./node_modules/.bin/lerna changed -p | wc -l)\"\n-\n - name: Release dev version for testing\n if: github.ref == 'refs/heads/master' && matrix.node-version == '15.x' && steps.changed_packages.outputs.changed_packages != '0'\n run: |\n@@ -70,11 +72,13 @@ jobs:\n NPM_TOKEN: ${{ secrets.NPM_TOKEN }}\n \n - name: Coveralls\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n uses: coverallsapp/github-action@master\n with:\n github-token: ${{ secrets.GITHUB_TOKEN }}\n \n - name: Codeclimate\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n uses: paambaati/[email protected]\n env:\n CC_TEST_REPORTER_ID: e2a39c5dc1a13674e97e94a467bacfaec953814982c7de89e9f0b55031e43bd8\n", "diff --git a/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java b/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java\nindex 33410da..edd0588 100644\n--- a/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java\n+++ b/protocol-impl/src/test/java/io/camunda/zeebe/protocol/impl/JsonSerializableToJsonTest.java\n@@ -787,7 +787,8 @@ final class JsonSerializableToJsonTest {\n }\n }],\n \"elementId\": \"activity\"\n- }]\n+ }],\n+ \"activatedElementInstanceKeys\": []\n }\n \"\"\"\n },\n@@ -803,7 +804,8 @@ final class JsonSerializableToJsonTest {\n {\n \"processInstanceKey\": 1,\n \"terminateInstructions\": [],\n- \"activateInstructions\": []\n+ \"activateInstructions\": [],\n+ \"activatedElementInstanceKeys\": []\n }\n \"\"\"\n },\n", "diff --git a/src/notebook/epics/kernel-launch.js b/src/notebook/epics/kernel-launch.js\nindex 9075d7c..9f16e67 100644\n--- a/src/notebook/epics/kernel-launch.js\n+++ b/src/notebook/epics/kernel-launch.js\n@@ -113,6 +113,12 @@ export function newKernelObservable(kernelSpec: KernelInfo, cwd: string) {\n observer.error({ type: 'ERROR', payload: error, err: true });\n observer.complete();\n });\n+ spawn.on('exit', () => {\n+ observer.complete();\n+ });\n+ spawn.on('disconnect', () => {\n+ observer.complete();\n+ });\n });\n });\n }\n", "diff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\nindex 8bb5c3d..f8f5e24 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n@@ -10,6 +10,7 @@ package 
io.camunda.zeebe.topology.changes;\n import io.camunda.zeebe.scheduler.future.ActorFuture;\n import io.camunda.zeebe.topology.state.ClusterTopology;\n import io.camunda.zeebe.topology.state.TopologyChangeOperation;\n+import io.camunda.zeebe.util.Either;\n import java.util.List;\n \n public interface TopologyChangeCoordinator {\n@@ -39,4 +40,16 @@ public interface TopologyChangeCoordinator {\n ActorFuture<Boolean> hasCompletedChanges(final long version);\n \n ActorFuture<ClusterTopology> getCurrentTopology();\n+\n+ ActorFuture<TopologyChangeResult> applyOperations(TopologyChangeRequest request);\n+\n+ record TopologyChangeResult(\n+ ClusterTopology currentTopology,\n+ ClusterTopology finalTopology,\n+ List<TopologyChangeOperation> operations) {}\n+\n+ interface TopologyChangeRequest {\n+ Either<Exception, List<TopologyChangeOperation>> operations(\n+ final ClusterTopology currentTopology);\n+ }\n }\ndiff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\nindex 13ec754..877fc3c 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n@@ -103,6 +103,62 @@ public class TopologyChangeCoordinatorImpl implements TopologyChangeCoordinator \n return clusterTopologyManager.getClusterTopology();\n }\n \n+ @Override\n+ public ActorFuture<TopologyChangeResult> applyOperations(final TopologyChangeRequest request) {\n+ final ActorFuture<TopologyChangeResult> future = executor.createFuture();\n+ clusterTopologyManager\n+ .getClusterTopology()\n+ .onComplete(\n+ (currentClusterTopology, errorOnGettingTopology) -> {\n+ if (errorOnGettingTopology != null) {\n+ future.completeExceptionally(errorOnGettingTopology);\n+ return;\n+ }\n+\n+ final var operationsEither = request.operations(currentClusterTopology);\n+ if (operationsEither.isLeft()) {\n+ future.completeExceptionally(operationsEither.getLeft());\n+ return;\n+ }\n+ final var operations = operationsEither.get();\n+ if (operations.isEmpty()) {\n+ // No operations to apply\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, currentClusterTopology, operations));\n+ return;\n+ }\n+\n+ final ActorFuture<ClusterTopology> validation =\n+ validateTopologyChangeRequest(currentClusterTopology, operations);\n+\n+ validation.onComplete(\n+ (simulatedFinalTopology, validationError) -> {\n+ if (validationError != null) {\n+ future.completeExceptionally(validationError);\n+ return;\n+ }\n+\n+ // if the validation was successful, apply the changes\n+ final ActorFuture<ClusterTopology> applyFuture = executor.createFuture();\n+ applyTopologyChange(\n+ operations, currentClusterTopology, simulatedFinalTopology, applyFuture);\n+\n+ applyFuture.onComplete(\n+ (ignore, error) -> {\n+ if (error == null) {\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, simulatedFinalTopology, operations));\n+ } else {\n+ future.completeExceptionally(error);\n+ }\n+ });\n+ });\n+ });\n+ return future;\n+ }\n+\n private ActorFuture<ClusterTopology> validateTopologyChangeRequest(\n final ClusterTopology currentClusterTopology,\n final List<TopologyChangeOperation> operations) {\n", "diff --git a/.circleci/get-deploy-tags.sh b/.circleci/get-deploy-tags.sh\nindex f80c8cb..7ddfa62 100755\n--- a/.circleci/get-deploy-tags.sh\n+++ 
b/.circleci/get-deploy-tags.sh\n@@ -20,7 +20,7 @@\n set -euo pipefail\n \n DOCKER_IMAGE_TAG=${1}\n-DOCKER_IMAGE=\"quay.io/influxdb/fusion\"\n+DOCKER_IMAGE=\"quay.io/influxdb/iox\"\n APP_NAME=\"IOx\"\n \n DOCKER_IMAGE_DIGEST=\"$(docker image inspect \"${DOCKER_IMAGE}:${DOCKER_IMAGE_TAG}\" --format '{{ if eq (len .RepoDigests) 1 }}{{index .RepoDigests 0}}{{ end }}')\"\n"]
5
["155611c99fe8692f1afc092599f5a7c727893315", "f7cc7b263afeb27eef393b7497db8dad8ebb0518", "a280a52c8309465276c3509848ddcddbe19732b6", "dec860436916ef216998f80f8b2f9c39d00c064d", "6786fd5955b064021f5b6d6a630453351d683fae"]
["build", "test", "fix", "feat", "ci"]
replace api call which requires auth token in public page re #4694 Signed-off-by: Pranav C <[email protected]>,add classname and style props for Playground,add instruction for finding version,verify process can start at supported element types Verifies a PI can be started at specific element types. The test will deploy the process, start an instance at the desired start element and verify that it has been activated succesfully.,temporary do no run "verify-ffmpeg.py" on Mac CI (#14986)
["diff --git a/packages/nc-gui/composables/useSharedView.ts b/packages/nc-gui/composables/useSharedView.ts\nindex cb0c5ea..f67a6c9 100644\n--- a/packages/nc-gui/composables/useSharedView.ts\n+++ b/packages/nc-gui/composables/useSharedView.ts\n@@ -17,7 +17,7 @@ export function useSharedView() {\n \n const { appInfo } = $(useGlobal())\n \n- const { loadProject } = useProject()\n+ const { project } = useProject()\n \n const appInfoDefaultLimit = appInfo.defaultLimit || 25\n \n@@ -76,7 +76,16 @@ export function useSharedView() {\n \n await setMeta(viewMeta.model)\n \n- await loadProject(true, viewMeta.project_id)\n+ // if project is not defined then set it with an object containing base\n+ if (!project.value?.bases)\n+ project.value = {\n+ bases: [\n+ {\n+ id: viewMeta.base_id,\n+ type: viewMeta.client,\n+ },\n+ ],\n+ }\n \n const relatedMetas = { ...viewMeta.relatedMetas }\n Object.keys(relatedMetas).forEach((key) => setMeta(relatedMetas[key]))\n", "diff --git a/packages/docz-theme-default/src/components/ui/Render.tsx b/packages/docz-theme-default/src/components/ui/Render.tsx\nindex 197359b..943f9ab 100644\n--- a/packages/docz-theme-default/src/components/ui/Render.tsx\n+++ b/packages/docz-theme-default/src/components/ui/Render.tsx\n@@ -24,9 +24,16 @@ const Code = styled('div')`\n }\n `\n \n-export const Render: RenderComponent = ({ component, code }) => (\n+export const Render: RenderComponent = ({\n+ component,\n+ code,\n+ className,\n+ style,\n+}) => (\n <Fragment>\n- <Playground>{component}</Playground>\n+ <Playground className={className} style={style}>\n+ {component}\n+ </Playground>\n <Code>{code}</Code>\n </Fragment>\n )\ndiff --git a/packages/docz/src/components/DocPreview.tsx b/packages/docz/src/components/DocPreview.tsx\nindex ca2d88f..ee8f7c0 100644\n--- a/packages/docz/src/components/DocPreview.tsx\n+++ b/packages/docz/src/components/DocPreview.tsx\n@@ -16,6 +16,8 @@ const DefaultLoading: SFC = () => null\n export type RenderComponent = ComponentType<{\n component: JSX.Element\n code: any\n+ className?: string\n+ style?: any\n }>\n \n export const DefaultRender: RenderComponent = ({ component, code }) => (\ndiff --git a/packages/docz/src/components/Playground.tsx b/packages/docz/src/components/Playground.tsx\nindex d6ff5a3..418c82e 100644\n--- a/packages/docz/src/components/Playground.tsx\n+++ b/packages/docz/src/components/Playground.tsx\n@@ -9,15 +9,21 @@ export interface PlaygroundProps {\n __code: (components: ComponentsMap) => any\n children: any\n components: ComponentsMap\n+ className?: string\n+ style?: any\n }\n \n const BasePlayground: SFC<PlaygroundProps> = ({\n components,\n children,\n __code,\n+ className,\n+ style,\n }) => {\n return components && components.render ? (\n <components.render\n+ className={className}\n+ style={style}\n component={isFn(children) ? 
children() : children}\n code={__code(components)}\n />\n", "diff --git a/.github/ISSUE_TEMPLATE/_bug_report_chs.md b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\nindex 42a2e0f..44a33db 100644\n--- a/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n+++ b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n@@ -36,7 +36,7 @@ assignees: ''\n ## \u8bbe\u5907\u4fe1\u606f\n - \u64cd\u4f5c\u7cfb\u7edf: [] <!-- \u5982 [Window10] -->\n - \u6d4f\u89c8\u5668\u7248\u672c: [] <!-- \u5982 [Chrome77] -->\n-- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] -->\n+- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] \uff08\u5728\u6269\u5c55\u7ba1\u7406\u9875\u9762\u67e5\u770b\uff09 -->\n \n <!-- \u8bf7\u5728\u4e0b\u65b9 ## \u5f00\u5934\u884c\u4e4b\u95f4\u7684\u7a7a\u767d\u5904\u586b\u5199 -->\n \n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\nnew file mode 100644\nindex 0000000..a505307\n--- /dev/null\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\n@@ -0,0 +1,233 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.engine.processing.processinstance;\n+\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.groups.Tuple.tuple;\n+\n+import io.camunda.zeebe.engine.util.EngineRule;\n+import io.camunda.zeebe.model.bpmn.Bpmn;\n+import io.camunda.zeebe.model.bpmn.BpmnModelInstance;\n+import io.camunda.zeebe.protocol.record.Record;\n+import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n+import io.camunda.zeebe.protocol.record.value.BpmnElementType;\n+import io.camunda.zeebe.test.util.record.RecordingExporter;\n+import io.camunda.zeebe.test.util.record.RecordingExporterTestWatcher;\n+import java.util.Collection;\n+import java.util.Collections;\n+import java.util.List;\n+import java.util.Map;\n+import org.junit.ClassRule;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.runner.RunWith;\n+import org.junit.runners.Parameterized;\n+import org.junit.runners.Parameterized.Parameters;\n+\n+@RunWith(Parameterized.class)\n+public class CreateProcessInstanceSupportedElementTest {\n+\n+ @ClassRule public static final EngineRule ENGINE = EngineRule.singlePartition();\n+ private static final String PROCESS_ID = \"processId\";\n+ private static final String CHILD_PROCESS_ID = \"childProcessId\";\n+ private static final String START_ELEMENT_ID = \"startElement\";\n+ private static final String MESSAGE = \"message\";\n+ private static final String JOBTYPE = \"jobtype\";\n+\n+ @Rule\n+ public final RecordingExporterTestWatcher recordingExporterTestWatcher =\n+ new RecordingExporterTestWatcher();\n+\n+ private final Scenario scenario;\n+\n+ public CreateProcessInstanceSupportedElementTest(final Scenario scenario) {\n+ this.scenario = scenario;\n+ }\n+\n+ @Parameters(name = \"{0}\")\n+ public static Collection<Object> scenarios() {\n+ return List.of(\n+ new Scenario(\n+ BpmnElementType.SUB_PROCESS,\n+ 
Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .subProcess(START_ELEMENT_ID)\n+ .embeddedSubProcess()\n+ .startEvent()\n+ .subProcessDone()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_SUB_PROCESS,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .eventSubProcess(\n+ START_ELEMENT_ID, e -> e.startEvent().timerWithDuration(\"PT1H\").endEvent())\n+ .startEvent()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_CATCH_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(START_ELEMENT_ID)\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_THROW_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateThrowEvent(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.END_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().endEvent(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SERVICE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.RECEIVE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .receiveTask(START_ELEMENT_ID)\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.USER_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().userTask(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.MANUAL_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .manualTask(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EXCLUSIVE_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .exclusiveGateway(START_ELEMENT_ID)\n+ .defaultFlow()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.PARALLEL_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .parallelGateway(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_BASED_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .eventBasedGateway(START_ELEMENT_ID)\n+ .intermediateCatchEvent()\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .moveToLastGateway()\n+ .intermediateCatchEvent()\n+ .timerWithDuration(\"PT1H\")\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.MULTI_INSTANCE_BODY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(\n+ START_ELEMENT_ID,\n+ t ->\n+ t.zeebeJobType(JOBTYPE)\n+ .multiInstance(m -> m.parallel().zeebeInputCollectionExpression(\"[1]\")))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.CALL_ACTIVITY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .callActivity(START_ELEMENT_ID, c -> c.zeebeProcessId(CHILD_PROCESS_ID))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.BUSINESS_RULE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ 
.startEvent()\n+ .businessRuleTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SCRIPT_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .scriptTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SEND_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .sendTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()));\n+ }\n+\n+ @Test\n+ public void testProcessInstanceCanStartAtElementType() {\n+ // given\n+ ENGINE.deployment().withXmlResource(scenario.modelInstance).deploy();\n+ if (scenario.type == BpmnElementType.CALL_ACTIVITY) {\n+ ENGINE.deployment().withXmlResource(getChildProcess()).deploy();\n+ }\n+\n+ // when\n+ final long instanceKey =\n+ ENGINE\n+ .processInstance()\n+ .ofBpmnProcessId(PROCESS_ID)\n+ .withStartInstruction(START_ELEMENT_ID)\n+ .withVariables(scenario.variables)\n+ .create();\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.processInstanceRecords()\n+ .withProcessInstanceKey(instanceKey)\n+ .onlyEvents()\n+ .limit(\n+ r ->\n+ r.getValue().getBpmnElementType() == scenario.type\n+ && r.getIntent() == ProcessInstanceIntent.ELEMENT_ACTIVATED))\n+ .extracting(record -> record.getValue().getBpmnElementType(), Record::getIntent)\n+ .containsSequence(\n+ tuple(BpmnElementType.PROCESS, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(BpmnElementType.PROCESS, ProcessInstanceIntent.ELEMENT_ACTIVATED),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATED));\n+ }\n+\n+ private BpmnModelInstance getChildProcess() {\n+ return Bpmn.createExecutableProcess(CHILD_PROCESS_ID).startEvent().endEvent().done();\n+ }\n+\n+ record Scenario(\n+ BpmnElementType type, BpmnModelInstance modelInstance, Map<String, Object> variables) {}\n+}\n", "diff --git a/vsts.yml b/vsts.yml\nindex 6cb0eb3..a058238 100644\n--- a/vsts.yml\n+++ b/vsts.yml\n@@ -86,13 +86,13 @@ jobs:\n killall Electron\n fi\n displayName: Make sure Electron isn't running from previous tests\n-\n- - bash: |\n- cd src\n- python electron/script/verify-ffmpeg.py --source-root \"$PWD\" --build-dir out/Default --ffmpeg-path out/ffmpeg\n- displayName: Verify non proprietary ffmpeg\n- condition: and(succeeded(), eq(variables['RUN_TESTS'], '1'))\n- timeoutInMinutes: 5\n+# FIXME(alexeykuzmin)\n+# - bash: |\n+# cd src\n+# python electron/script/verify-ffmpeg.py --source-root \"$PWD\" --build-dir out/Default --ffmpeg-path out/ffmpeg\n+# displayName: Verify non proprietary ffmpeg\n+# condition: and(succeeded(), eq(variables['RUN_TESTS'], '1'))\n+# timeoutInMinutes: 5\n \n - bash: |\n cd src\n"]
5
["4986a5892fb00bd5a6b2065ad8cfefbc36052dd7", "1b64ed30a2e3c41abf3976efee4c7463044b2ef1", "af0a5f7ab9d71fe20aa0888f682368f32b26fe18", "a5ecfdf49b0d4c43fbbbf7947be7c0327ccb3415", "9187415f5ee35d2e88dd834e413fc16bf19c5db1"]
["fix", "feat", "docs", "test", "ci"]
add clean up test Add another clean up test, which verifies that the state is cleaned up after the timer (non-recurring) is triggered.,remove duplicated code,getBorderSize() missing "width" The correct property name to use is "borderWidth", not just "border". "border" works in Chrome but was breaking in Firefox. Also had to change .ui-grid-header's box-sizing to content-box so IE11 would include the border in height calcs. AND finally IE11 was returning fractional heights so Grid parseInt()s the returned values.,101: fix import key cmd Signed-off-by: Sam Alba <[email protected]>,Template using kube api version Signed-off-by: rjshrjndrn <[email protected]>
["diff --git a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\nindex d36b4c9..ca5047f 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n@@ -630,6 +630,40 @@ public final class ProcessExecutionCleanStateTest {\n }\n \n @Test\n+ public void testProcessWithTriggerTimerStartEvent() {\n+ // given\n+ final var deployment =\n+ engineRule\n+ .deployment()\n+ .withXmlResource(\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .timerWithDate(\"=now() + duration(\\\"PT15S\\\")\")\n+ .endEvent()\n+ .done())\n+ .deploy();\n+\n+ final var processDefinitionKey =\n+ deployment.getValue().getProcessesMetadata().get(0).getProcessDefinitionKey();\n+\n+ // when\n+ engineRule.awaitProcessingOf(\n+ RecordingExporter.timerRecords(TimerIntent.CREATED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .getFirst());\n+\n+ engineRule.increaseTime(Duration.ofSeconds(15));\n+\n+ RecordingExporter.processInstanceRecords(ProcessInstanceIntent.ELEMENT_COMPLETED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .withElementType(BpmnElementType.PROCESS)\n+ .await();\n+\n+ // then\n+ assertThatStateIsEmpty();\n+ }\n+\n+ @Test\n public void testProcessWithTimerStartEventRedeployment() {\n // given\n final var deployment =\n", "diff --git a/packages/core/src/components/action-sheet/action-sheet.tsx b/packages/core/src/components/action-sheet/action-sheet.tsx\nindex 7166508..dad7daf 100644\n--- a/packages/core/src/components/action-sheet/action-sheet.tsx\n+++ b/packages/core/src/components/action-sheet/action-sheet.tsx\n@@ -1,9 +1,9 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, isDef, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -23,15 +23,15 @@ import mdLeaveAnimation from './animations/md.leave';\n })\n export class ActionSheet implements OverlayInterface {\n \n+ private presented = false;\n+\n mode: string;\n color: string;\n-\n- private presented = false;\n- private animation: Animation | null = null;\n+ animation: Animation;\n \n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -178,25 +178,8 @@ export class ActionSheet implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) 
{\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- // Check if prop animate is false or if the config for animate is defined/false\n- if (!this.willAnimate || (isDef(this.config.get('willAnimate')) && this.config.get('willAnimate') === false)) {\n- // if the duration is 0, it won't actually animate I don't think\n- // TODO - validate this\n- this.animation = animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n protected buttonClick(button: ActionSheetButton) {\ndiff --git a/packages/core/src/components/alert/alert.tsx b/packages/core/src/components/alert/alert.tsx\nindex 800b77b..bdf4fc5 100644\n--- a/packages/core/src/components/alert/alert.tsx\n+++ b/packages/core/src/components/alert/alert.tsx\n@@ -1,8 +1,8 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n-import { domControllerAsync, playAnimationAsync, autoFocus } from '../../utils/helpers';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { domControllerAsync, autoFocus } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -21,18 +21,19 @@ import mdLeaveAnimation from './animations/md.leave';\n }\n })\n export class Alert implements OverlayInterface {\n- mode: string;\n- color: string;\n \n private presented = false;\n- private animation: Animation | null = null;\n private activeId: string;\n private inputType: string | null = null;\n private hdrId: string;\n \n+ animation: Animation;\n+ mode: string;\n+ color: string;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -264,25 +265,10 @@ export class Alert implements OverlayInterface {\n return values;\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n-\n private renderCheckbox(inputs: 
AlertInput[]) {\n if (inputs.length === 0) return null;\n \ndiff --git a/packages/core/src/components/loading/loading.tsx b/packages/core/src/components/loading/loading.tsx\nindex f45eaf1..cc4f511 100644\n--- a/packages/core/src/components/loading/loading.tsx\n+++ b/packages/core/src/components/loading/loading.tsx\n@@ -1,13 +1,13 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n import mdEnterAnimation from './animations/md.enter';\n import mdLeaveAnimation from './animations/md.leave';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n @Component({\n tag: 'ion-loading',\n@@ -21,16 +21,17 @@ import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n })\n \n export class Loading implements OverlayInterface {\n- color: string;\n- mode: string;\n \n private presented = false;\n- private animation: Animation;\n private durationTimeout: any;\n \n+ animation: Animation;\n+ color: string;\n+ mode: string;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -199,24 +200,8 @@ export class Loading implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- // if the duration is 0, it won't actually animate I don't think\n- // TODO - validate this\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n hostData() {\ndiff --git a/packages/core/src/components/modal/modal.tsx b/packages/core/src/components/modal/modal.tsx\nindex af50d63..2b7510c 100644\n--- a/packages/core/src/components/modal/modal.tsx\n+++ b/packages/core/src/components/modal/modal.tsx\n@@ -1,10 +1,10 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n 
\n import { DomFrameworkDelegate } from '../../utils/dom-framework-delegate';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -25,14 +25,16 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Modal implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private usersComponentElement: HTMLElement;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n+\n @Prop() overlayId: number;\n @Prop({ mutable: true }) delegate: FrameworkDelegate;\n \n@@ -208,22 +210,8 @@ export class Modal implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n @Method()\ndiff --git a/packages/core/src/components/picker/picker.tsx b/packages/core/src/components/picker/picker.tsx\nindex 13faa3e..d70381e 100644\n--- a/packages/core/src/components/picker/picker.tsx\n+++ b/packages/core/src/components/picker/picker.tsx\n@@ -1,9 +1,9 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop, State } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { getClassMap } from '../../utils/theme';\n-import { OverlayInterface } from '../../utils/overlays';\n+import { OverlayInterface, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -21,16 +21,17 @@ import iosLeaveAnimation from './animations/ios.leave';\n export class Picker implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private durationTimeout: any;\n private mode: string;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n @State() private showSpinner: boolean = null;\n @State() private spinner: string;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: 
AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -231,22 +232,8 @@ export class Picker implements OverlayInterface {\n return this.columns;\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- })\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n private buttonClick(button: PickerButton) {\ndiff --git a/packages/core/src/components/popover/popover.tsx b/packages/core/src/components/popover/popover.tsx\nindex 65031ff..6a47bf6 100644\n--- a/packages/core/src/components/popover/popover.tsx\n+++ b/packages/core/src/components/popover/popover.tsx\n@@ -1,10 +1,10 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n import { DomFrameworkDelegate } from '../../utils/dom-framework-delegate';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -24,12 +24,13 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Popover implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private usersComponentElement: HTMLElement;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop({ mutable: true }) delegate: FrameworkDelegate;\n@@ -224,22 +225,8 @@ export class Popover implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el, this.ev).then((animation) => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- })\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return 
overlayAnimation(this, animationBuilder, this.willAnimate, this.el, this.ev);\n }\n \n hostData() {\ndiff --git a/packages/core/src/components/toast/toast.tsx b/packages/core/src/components/toast/toast.tsx\nindex 1afa318..372070a 100644\n--- a/packages/core/src/components/toast/toast.tsx\n+++ b/packages/core/src/components/toast/toast.tsx\n@@ -1,9 +1,9 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, CssClassMap, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, CssClassMap, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface } from '../../utils/overlays';\n+import { OverlayInterface, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -24,14 +24,14 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Toast implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation | null;\n \n @Element() private el: HTMLElement;\n \n mode: string;\n color: string;\n+ animation: Animation | null;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -123,6 +123,22 @@ export class Toast implements OverlayInterface {\n */\n @Event() ionToastDidUnload: EventEmitter<ToastEventDetail>;\n \n+ componentDidLoad() {\n+ this.ionToastDidLoad.emit();\n+ }\n+\n+ componentDidUnload() {\n+ this.ionToastDidUnload.emit();\n+ }\n+\n+ @Listen('ionDismiss')\n+ protected onDismiss(ev: UIEvent) {\n+ ev.stopPropagation();\n+ ev.preventDefault();\n+\n+ this.dismiss();\n+ }\n+\n /**\n * Present the toast overlay after it has been created.\n */\n@@ -169,38 +185,8 @@ export class Toast implements OverlayInterface {\n });\n }\n \n- playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el, this.position).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n- }\n-\n- componentDidLoad() {\n- this.ionToastDidLoad.emit();\n- }\n-\n- componentDidUnload() {\n- this.ionToastDidUnload.emit();\n- }\n-\n- @Listen('ionDismiss')\n- protected onDismiss(ev: UIEvent) {\n- ev.stopPropagation();\n- ev.preventDefault();\n-\n- this.dismiss();\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, this.position);\n }\n \n private wrapperClass(): CssClassMap {\ndiff --git a/packages/core/src/utils/overlays.ts b/packages/core/src/utils/overlays.ts\nindex 8926544..634df43 100644\n--- a/packages/core/src/utils/overlays.ts\n+++ 
b/packages/core/src/utils/overlays.ts\n@@ -1,3 +1,5 @@\n+import { AnimationBuilder, Animation } from \"..\";\n+import { playAnimationAsync } from \"./helpers\";\n \n let lastId = 1;\n \n@@ -56,8 +58,33 @@ export function removeLastOverlay(overlays: OverlayMap) {\n return toRemove ? toRemove.dismiss() : Promise.resolve();\n }\n \n+export function overlayAnimation(\n+ overlay: OverlayInterface,\n+ animationBuilder: AnimationBuilder,\n+ animate: boolean,\n+ baseEl: HTMLElement,\n+ opts: any\n+): Promise<void> {\n+ if (overlay.animation) {\n+ overlay.animation.destroy();\n+ overlay.animation = null;\n+ }\n+ return overlay.animationCtrl.create(animationBuilder, baseEl, opts).then(animation => {\n+ overlay.animation = animation;\n+ if (!animate) {\n+ animation.duration(0);\n+ }\n+ return playAnimationAsync(animation);\n+ }).then((animation) => {\n+ animation.destroy();\n+ overlay.animation = null;\n+ });\n+}\n+\n export interface OverlayInterface {\n overlayId: number;\n+ animation: Animation;\n+ animationCtrl: HTMLIonAnimationControllerElement;\n \n present(): Promise<void>;\n dismiss(data?: any, role?: string): Promise<void>;\n", "diff --git a/src/js/core/factories/Grid.js b/src/js/core/factories/Grid.js\nindex dcf10af..2be7842 100644\n--- a/src/js/core/factories/Grid.js\n+++ b/src/js/core/factories/Grid.js\n@@ -1525,7 +1525,7 @@ angular.module('ui.grid')\n var oldHeaderHeight = container.headerHeight;\n var headerHeight = gridUtil.outerElementHeight(container.header);\n \n- container.headerHeight = headerHeight;\n+ container.headerHeight = parseInt(headerHeight, 10);\n \n if (oldHeaderHeight !== headerHeight) {\n rebuildStyles = true;\n@@ -1534,7 +1534,9 @@ angular.module('ui.grid')\n // Get the \"inner\" header height, that is the height minus the top and bottom borders, if present. We'll use it to make sure all the headers have a consistent height\n var topBorder = gridUtil.getBorderSize(container.header, 'top');\n var bottomBorder = gridUtil.getBorderSize(container.header, 'bottom');\n- var innerHeaderHeight = headerHeight - topBorder - bottomBorder;\n+ var innerHeaderHeight = parseInt(headerHeight - topBorder - bottomBorder, 10);\n+\n+ innerHeaderHeight = innerHeaderHeight < 0 ? 
0 : innerHeaderHeight;\n \n container.innerHeaderHeight = innerHeaderHeight;\n \ndiff --git a/src/js/core/services/ui-grid-util.js b/src/js/core/services/ui-grid-util.js\nindex 2c32cbe..cc7c36c 100644\n--- a/src/js/core/services/ui-grid-util.js\n+++ b/src/js/core/services/ui-grid-util.js\n@@ -757,6 +757,8 @@ module.service('gridUtil', ['$log', '$window', '$document', '$http', '$templateC\n borderType = 'border';\n }\n \n+ borderType += 'Width';\n+\n var val = parseInt(styles[borderType], 10);\n \n if (isNaN(val)) {\ndiff --git a/src/less/header.less b/src/less/header.less\nindex 5468a43..de8ff0b 100644\n--- a/src/less/header.less\n+++ b/src/less/header.less\n@@ -7,6 +7,7 @@\n \n .ui-grid-header {\n border-bottom: 1px solid @borderColor;\n+ box-sizing: content-box;;\n }\n \n .ui-grid-top-panel {\n", "diff --git a/docs/learn/101-use.md b/docs/learn/101-use.md\nindex 283c1c1..2ec10f9 100644\n--- a/docs/learn/101-use.md\n+++ b/docs/learn/101-use.md\n@@ -41,8 +41,7 @@ cd ./examples/todoapp\n The example app contains encrypted secrets and other pre-configured inputs, here is how to decrypt them:\n \n ```sh\n-curl -sfL https://releases.dagger.io/examples/key.txt >> ~/.config/dagger/keys.txt\n-dagger input list\n+dagger input list || curl -sfL https://releases.dagger.io/examples/key.txt >> ~/.config/dagger/keys.txt\n ```\n \n **Step 4**: Deploy!\n", "diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml\nindex c014f34..2a12e0d 100644\n--- a/.github/workflows/api-ee.yaml\n+++ b/.github/workflows/api-ee.yaml\n@@ -8,7 +8,7 @@ on:\n default: 'false'\n push:\n branches:\n- - dev\n+ - test_ci\n paths:\n - ee/api/**\n - api/**\n@@ -112,7 +112,8 @@ jobs:\n # Deploy command\n kubectl config set-context --namespace=app --current\n kubectl config get-contexts\n- helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -f -\n+ k_version=$(kubectl version --short 2>/dev/null | awk '/Server/{print $NF}')\n+ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -\n env:\n DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}\n # We're not passing -ee flag, because helm will add that.\n"]
5
["aa746b764e6c54bbbd631210fce35df842d09b12", "9e3f295bbfd4098ffda1ae6656699f60b86c1f92", "174f25214caa10ec643db6c81aaa0f3511bf78f4", "2b01808ec86fe9d8b4a93141a1b7f95e11fd6010", "c3531347fe5a4cc82d426db195026a5bdad15e7a"]
["test", "refactor", "fix", "docs", "ci"]
update deps,add classname and style props for Playground,add missing region to cloudformation_stack_set,refactor generate_completion,fix test Write another record so the commit position is updated and we can take a snapshot
["diff --git a/package.json b/package.json\nindex 3696132..ba002d2 100755\n--- a/package.json\n+++ b/package.json\n@@ -34,7 +34,6 @@\n \"koa-router\": \"~7.4.0\",\n \"koa-session\": \"~5.10.1\",\n \"koa-static\": \"~5.0.0\",\n- \"koa2-ratelimit\": \"~0.8.0\",\n \"koa2-swagger-ui\": \"~2.13.2\",\n \"node-fetch\": \"^2.5.0\",\n \"passport-github\": \"~1.1.0\",\ndiff --git a/src/index.ts b/src/index.ts\nindex 847e8aa..8c9baff 100755\n--- a/src/index.ts\n+++ b/src/index.ts\n@@ -4,7 +4,6 @@ import * as Koa from 'koa';\n import * as bodyParser from 'koa-bodyparser';\n import * as session from 'koa-session';\n import * as serve from 'koa-static';\n-// import { RateLimit } from 'koa2-ratelimit';\n import { Server } from 'net';\n \n import { setupPassport } from './auth';\ndiff --git a/src/typings/koa-router.d.ts b/src/typings/koa-router.d.ts\ndeleted file mode 100644\nindex f891ed8..0000000\n--- a/src/typings/koa-router.d.ts\n+++ /dev/null\n@@ -1,16 +0,0 @@\n-import * as koa from 'koa-router';\n-import { IUserSession } from '../models';\n-import { ILogger } from '../logger';\n-\n-declare module 'koa-router' {\n- export interface IRouterContext {\n- state: { user: IUserSession | undefined };\n- logger: ILogger;\n- }\n-}\n-\n-declare module 'koa' {\n- export interface Context {\n- logger: ILogger;\n- }\n-}\ndiff --git a/src/typings/koa2-ratelimit/index.d.ts b/src/typings/koa2-ratelimit/index.d.ts\ndeleted file mode 100644\nindex cc73228..0000000\n--- a/src/typings/koa2-ratelimit/index.d.ts\n+++ /dev/null\n@@ -1,13 +0,0 @@\n-declare module 'koa2-ratelimit' {\n- import * as Koa from 'koa';\n- interface Props {\n- interval: { min?: number; hour?: number }; // 15 minutes = 15*60*1000\n- max: number;\n- message?: string;\n- prefixKey?: string;\n- getUserId?: (ctx: Koa.Context) => Promise<string>;\n- }\n- export namespace RateLimit {\n- function middleware(options: Props): Koa.Middleware;\n- }\n-}\ndiff --git a/src/typings/mockingoose/index.d.ts b/src/typings/mockingoose/index.d.ts\ndeleted file mode 100644\nindex 757c4e4..0000000\n--- a/src/typings/mockingoose/index.d.ts\n+++ /dev/null\n@@ -1,5 +0,0 @@\n-declare module 'mockingoose' {\n- const mockingoose: any;\n-\n- export default mockingoose;\n-}\n", "diff --git a/packages/docz-theme-default/src/components/ui/Render.tsx b/packages/docz-theme-default/src/components/ui/Render.tsx\nindex 197359b..943f9ab 100644\n--- a/packages/docz-theme-default/src/components/ui/Render.tsx\n+++ b/packages/docz-theme-default/src/components/ui/Render.tsx\n@@ -24,9 +24,16 @@ const Code = styled('div')`\n }\n `\n \n-export const Render: RenderComponent = ({ component, code }) => (\n+export const Render: RenderComponent = ({\n+ component,\n+ code,\n+ className,\n+ style,\n+}) => (\n <Fragment>\n- <Playground>{component}</Playground>\n+ <Playground className={className} style={style}>\n+ {component}\n+ </Playground>\n <Code>{code}</Code>\n </Fragment>\n )\ndiff --git a/packages/docz/src/components/DocPreview.tsx b/packages/docz/src/components/DocPreview.tsx\nindex ca2d88f..ee8f7c0 100644\n--- a/packages/docz/src/components/DocPreview.tsx\n+++ b/packages/docz/src/components/DocPreview.tsx\n@@ -16,6 +16,8 @@ const DefaultLoading: SFC = () => null\n export type RenderComponent = ComponentType<{\n component: JSX.Element\n code: any\n+ className?: string\n+ style?: any\n }>\n \n export const DefaultRender: RenderComponent = ({ component, code }) => (\ndiff --git a/packages/docz/src/components/Playground.tsx b/packages/docz/src/components/Playground.tsx\nindex d6ff5a3..418c82e 
100644\n--- a/packages/docz/src/components/Playground.tsx\n+++ b/packages/docz/src/components/Playground.tsx\n@@ -9,15 +9,21 @@ export interface PlaygroundProps {\n __code: (components: ComponentsMap) => any\n children: any\n components: ComponentsMap\n+ className?: string\n+ style?: any\n }\n \n const BasePlayground: SFC<PlaygroundProps> = ({\n components,\n children,\n __code,\n+ className,\n+ style,\n }) => {\n return components && components.render ? (\n <components.render\n+ className={className}\n+ style={style}\n component={isFn(children) ? children() : children}\n code={__code(components)}\n />\n", "diff --git a/internal/providers/terraform/aws/cloudformation_stack_set.go b/internal/providers/terraform/aws/cloudformation_stack_set.go\nindex 6720caa..e752b79 100644\n--- a/internal/providers/terraform/aws/cloudformation_stack_set.go\n+++ b/internal/providers/terraform/aws/cloudformation_stack_set.go\n@@ -12,7 +12,7 @@ func getCloudFormationStackSetRegistryItem() *schema.RegistryItem {\n \t}\n }\n func NewCloudformationStackSet(d *schema.ResourceData, u *schema.UsageData) *schema.Resource {\n-\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address)}\n+\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address), Region: strPtr(d.Get(\"region\").String())}\n \tif !d.IsEmpty(\"template_body\") {\n \t\tr.TemplateBody = strPtr(d.Get(\"template_body\").String())\n \t}\n", "diff --git a/src/lib.rs b/src/lib.rs\nindex dfd8014..15850f7 100644\n--- a/src/lib.rs\n+++ b/src/lib.rs\n@@ -1,11 +1,106 @@\n //! Generates [Nushell](https://github.com/nushell/nushell) completions for [`clap`](https://github.com/clap-rs/clap) based CLIs\n \n-use clap::Command;\n+use clap::{Arg, Command};\n use clap_complete::Generator;\n \n /// Generate Nushell complete file\n pub struct Nushell;\n \n+enum Argument {\n+ Short(char),\n+ Long(String),\n+ ShortAndLong(char, String),\n+ Positional(String, bool),\n+}\n+\n+struct ArgumentLine {\n+ arg: Argument,\n+ takes_values: bool,\n+ help: Option<String>,\n+}\n+\n+impl From<&Arg> for ArgumentLine {\n+ fn from(arg: &Arg) -> Self {\n+ let takes_values = arg\n+ .get_num_args()\n+ .map(|v| v.takes_values())\n+ .unwrap_or(false);\n+\n+ let help = arg.get_help().map(|s| s.to_string());\n+\n+ if arg.is_positional() {\n+ let id = arg.get_id().to_string();\n+ let required = arg.is_required_set();\n+ let arg = Argument::Positional(id, required);\n+\n+ return Self {\n+ arg,\n+ takes_values,\n+ help,\n+ };\n+ }\n+\n+ let short = arg.get_short();\n+ let long = arg.get_long();\n+\n+ match short {\n+ Some(short) => match long {\n+ Some(long) => Self {\n+ arg: Argument::ShortAndLong(short, long.into()),\n+ takes_values,\n+ help,\n+ },\n+ None => Self {\n+ arg: Argument::Short(short),\n+ takes_values,\n+ help,\n+ },\n+ },\n+ None => match long {\n+ Some(long) => Self {\n+ arg: Argument::Long(long.into()),\n+ takes_values,\n+ help,\n+ },\n+ None => unreachable!(\"No short or long option found\"),\n+ },\n+ }\n+ }\n+}\n+\n+impl ToString for ArgumentLine {\n+ fn to_string(&self) -> String {\n+ let mut s = String::new();\n+\n+ match &self.arg {\n+ Argument::Short(short) => s.push_str(format!(\" -{}\", short).as_str()),\n+ Argument::Long(long) => s.push_str(format!(\" --{}\", long).as_str()),\n+ Argument::ShortAndLong(short, long) => {\n+ s.push_str(format!(\" --{}(-{})\", long, short).as_str())\n+ }\n+ Argument::Positional(positional, required) => {\n+ s.push_str(format!(\" {}\", positional).as_str());\n+\n+ if !*required {\n+ s.push('?');\n+ }\n+ }\n+ }\n+\n+ if self.takes_values {\n+ 
s.push_str(\": string\");\n+ }\n+\n+ if let Some(help) = &self.help {\n+ s.push_str(format!(\"\\t# {}\", help).as_str());\n+ }\n+\n+ s.push('\\n');\n+\n+ s\n+ }\n+}\n+\n impl Generator for Nushell {\n fn file_name(&self, name: &str) -> String {\n format!(\"{}.nu\", name)\n@@ -37,51 +132,18 @@ fn generate_completion(completions: &mut String, cmd: &Command, is_subcommand: b\n \n let bin_name = cmd.get_bin_name().expect(\"Failed to get bin name\");\n \n- if is_subcommand {\n- completions.push_str(format!(\" export extern \\\"{}\\\" [\\n\", bin_name).as_str());\n+ let name = if is_subcommand {\n+ format!(r#\"\"{}\"\"#, bin_name)\n } else {\n- completions.push_str(format!(\" export extern {} [\\n\", bin_name).as_str());\n- }\n+ bin_name.into()\n+ };\n \n- let mut s = String::new();\n- for arg in cmd.get_arguments() {\n- if arg.is_positional() {\n- s.push_str(format!(\" {}\", arg.get_id()).as_str());\n- if !arg.is_required_set() {\n- s.push('?');\n- }\n- }\n-\n- let long = arg.get_long();\n- if let Some(opt) = long {\n- s.push_str(format!(\" --{}\", opt).as_str());\n- }\n+ completions.push_str(format!(\" export extern {} [\\n\", name).as_str());\n \n- let short = arg.get_short();\n- if let Some(opt) = short {\n- if long.is_some() {\n- s.push_str(format!(\"(-{})\", opt).as_str());\n- } else {\n- s.push_str(format!(\" -{}\", opt).as_str());\n- }\n- }\n-\n- if let Some(v) = arg.get_num_args() {\n- if v.takes_values() {\n- // TODO: add more types?\n- // TODO: add possible values?\n- s.push_str(\": string\");\n- }\n- }\n-\n- if let Some(msg) = arg.get_help() {\n- if arg.is_positional() || long.is_some() || short.is_some() {\n- s.push_str(format!(\"\\t# {}\", msg).as_str());\n- }\n- }\n-\n- s.push('\\n');\n- }\n+ let s: String = cmd\n+ .get_arguments()\n+ .map(|arg| ArgumentLine::from(arg).to_string())\n+ .collect();\n \n completions.push_str(&s);\n completions.push_str(\" ]\\n\\n\");\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\nindex 24f1316..881c727 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n@@ -70,6 +70,14 @@ public class ReaderCloseTest {\n .getCluster()\n .getNodeId();\n clusteringRule.forceClusterToHaveNewLeader(followerId);\n+ // because of https://github.com/camunda-cloud/zeebe/issues/8329\n+ // we need to add another record so we can do a snapshot\n+ clientRule\n+ .getClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"test\")\n+ .correlationKey(\"test\")\n+ .send();\n \n // when\n clusteringRule.triggerAndWaitForSnapshots();\n@@ -78,6 +86,7 @@ public class ReaderCloseTest {\n for (final Broker broker : clusteringRule.getBrokers()) {\n assertThatFilesOfDeletedSegmentsDoesNotExist(broker);\n }\n+ assertThat(leaderId).isNotEqualTo(clusteringRule.getLeaderForPartition(1).getNodeId());\n }\n \n private void assertThatFilesOfDeletedSegmentsDoesNotExist(final Broker leader)\n"]
5
["f46c6c6c26a14312aa05a77ff2a14aebd74e13ac", "1b64ed30a2e3c41abf3976efee4c7463044b2ef1", "304d0588f634e9e72087a706367c53af9c7f7180", "f1bc5a554af4e617c7d7508f7f16f8fd25c78c91", "47df74d40becf915a9d89cdb887abd259b77def0"]
["build", "feat", "fix", "refactor", "test"]
methods for scanning headers,build updates,remove unnecessary lines from verify-wal test,set first-attempt to 5s and subsequent-attempt to 180s by default,add system get version info Fiddle example (#20536)
["diff --git a/src/chainstate/stacks/db/headers.rs b/src/chainstate/stacks/db/headers.rs\nindex a12362d..91eb580 100644\n--- a/src/chainstate/stacks/db/headers.rs\n+++ b/src/chainstate/stacks/db/headers.rs\n@@ -31,8 +31,8 @@ use crate::core::FIRST_BURNCHAIN_CONSENSUS_HASH;\n use crate::core::FIRST_STACKS_BLOCK_HASH;\n use crate::util_lib::db::Error as db_error;\n use crate::util_lib::db::{\n- query_count, query_row, query_row_columns, query_row_panic, query_rows, DBConn, FromColumn,\n- FromRow,\n+ query_count, query_row, query_row_columns, query_row_panic, query_rows, u64_to_sql, DBConn,\n+ FromColumn, FromRow,\n };\n use clarity::vm::costs::ExecutionCost;\n \n@@ -360,4 +360,23 @@ impl StacksChainState {\n }\n Ok(ret)\n }\n+\n+ /// Get all headers at a given Stacks height\n+ pub fn get_all_headers_at_height(\n+ conn: &Connection,\n+ height: u64,\n+ ) -> Result<Vec<StacksHeaderInfo>, Error> {\n+ let qry =\n+ \"SELECT * FROM block_headers WHERE block_height = ?1 ORDER BY burn_header_height DESC\";\n+ let args: &[&dyn ToSql] = &[&u64_to_sql(height)?];\n+ query_rows(conn, qry, args).map_err(|e| e.into())\n+ }\n+\n+ /// Get the highest known header height\n+ pub fn get_max_header_height(conn: &Connection) -> Result<u64, Error> {\n+ let qry = \"SELECT block_height FROM block_headers ORDER BY block_height DESC LIMIT 1\";\n+ query_row(conn, qry, NO_PARAMS)\n+ .map(|row_opt: Option<i64>| row_opt.map(|h| h as u64).unwrap_or(0))\n+ .map_err(|e| e.into())\n+ }\n }\n", "diff --git a/demo/vanilla_new/css/404.min.css b/demo/vanilla_new/css/404.min.css\nindex a3485b4..e69de29 100644\n--- a/demo/vanilla_new/css/404.min.css\n+++ b/demo/vanilla_new/css/404.min.css\n@@ -1 +0,0 @@\n-@import url(https://fonts.googleapis.com/css?family=Share+Tech+Mono%7CSpace+Mono);a,abbr,acronym,address,applet,article,aside,audio,b,big,blockquote,body,canvas,caption,center,cite,code,dd,del,details,dfn,div,dl,dt,em,embed,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,header,hgroup,html,i,iframe,img,ins,kbd,label,legend,li,mark,menu,nav,object,ol,output,p,pre,q,ruby,s,samp,section,small,span,strike,strong,sub,summary,sup,table,tbody,td,tfoot,th,thead,time,tr,tt,u,ul,var,video{margin:0;padding:0;border:0;font-size:100%;font:inherit;vertical-align:baseline}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section{display:block}body{line-height:1}ol,ul{list-style:none}blockquote,q{quotes:none}blockquote:after,blockquote:before,q:after,q:before{content:\"\";content:none}table{border-collapse:collapse;border-spacing:0}body{padding:0;margin:0;font-size:18px}.container{min-height:100vh;position:relative;padding:240px 0;box-sizing:border-box}.overlay{position:absolute;top:0;left:0;width:100%;height:100%;overflow:hidden}.content{position:absolute;top:50%;left:50%;width:100%;transform:translate(-50%,-50%)}.message{text-align:center;color:#000}.message-heading{font-family:\"Share Tech Mono\";font-weight:900;text-transform:uppercase;letter-spacing:.7em;font-size:2rem;padding:0 0 0 1.4em}.message-description{font-family:\"Space Mono\";line-height:42px;font-size:15px;letter-spacing:.15rem;padding:0 20px;max-width:600px;margin:auto}.links{max-width:600px;margin:40px auto 0;text-align:center}.links a{width:170px;display:inline-block;padding:15px 0;margin:0 15px;border:1px solid #000;color:#000;text-decoration:none;font-family:\"Space Mono\";text-transform:uppercase;font-size:11px;letter-spacing:.1rem;position:relative}.links 
a:before{content:\"\";height:42px;background:#000;position:absolute;top:0;right:0;width:0;transition:all .3s}.links a:after{transition:all .3s;z-index:999;position:relative;content:\"back to hompage\"}.links a:hover:before{width:170px}.links a:hover:after{color:#fff}.links a:nth-child(2){background:#fff;color:#000}.links a:nth-child(2):before{background:#212121;left:0}.links a:nth-child(2):after{content:\"report error\"}.links a:nth-child(2):hover:after{color:#fff}.social{position:absolute;bottom:15px;left:15px}.social-list{margin:0;padding:0;list-style-type:none}.social-list li{display:inline-block;margin:5px 10px}.social-list li a{color:#000}@media (max-width:480px){.message-heading{font-size:1rem;margin-bottom:30px}.message-description{font-size:.7rem;line-height:2rem}.links a{margin:10px;width:280px}.social{left:50%;margin-left:-55px}}\ndiff --git a/demo/vanilla_new/css/main.min.css b/demo/vanilla_new/css/main.min.css\nindex 043eb4f..e69de29 100644\n--- a/demo/vanilla_new/css/main.min.css\n+++ b/demo/vanilla_new/css/main.min.css\n@@ -1 +0,0 @@\n-html{height:100%;overflow:hidden}body{line-height:1;height:100%;overflow:hidden;background:#000}#floating-container{right:20px;top:20px;position:fixed;z-index:4000}\ndiff --git a/demo/vanilla_new/js/404.min.js b/demo/vanilla_new/js/404.min.js\nindex 3642106..e69de29 100644\n--- a/demo/vanilla_new/js/404.min.js\n+++ b/demo/vanilla_new/js/404.min.js\n@@ -1 +0,0 @@\n-tsParticles.loadJSON(\"tsparticles\",\"/configs/404.json\");\ndiff --git a/website/css/404.min.css b/website/css/404.min.css\nindex a3485b4..e69de29 100644\n--- a/website/css/404.min.css\n+++ b/website/css/404.min.css\n@@ -1 +0,0 @@\n-@import url(https://fonts.googleapis.com/css?family=Share+Tech+Mono%7CSpace+Mono);a,abbr,acronym,address,applet,article,aside,audio,b,big,blockquote,body,canvas,caption,center,cite,code,dd,del,details,dfn,div,dl,dt,em,embed,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,header,hgroup,html,i,iframe,img,ins,kbd,label,legend,li,mark,menu,nav,object,ol,output,p,pre,q,ruby,s,samp,section,small,span,strike,strong,sub,summary,sup,table,tbody,td,tfoot,th,thead,time,tr,tt,u,ul,var,video{margin:0;padding:0;border:0;font-size:100%;font:inherit;vertical-align:baseline}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section{display:block}body{line-height:1}ol,ul{list-style:none}blockquote,q{quotes:none}blockquote:after,blockquote:before,q:after,q:before{content:\"\";content:none}table{border-collapse:collapse;border-spacing:0}body{padding:0;margin:0;font-size:18px}.container{min-height:100vh;position:relative;padding:240px 0;box-sizing:border-box}.overlay{position:absolute;top:0;left:0;width:100%;height:100%;overflow:hidden}.content{position:absolute;top:50%;left:50%;width:100%;transform:translate(-50%,-50%)}.message{text-align:center;color:#000}.message-heading{font-family:\"Share Tech Mono\";font-weight:900;text-transform:uppercase;letter-spacing:.7em;font-size:2rem;padding:0 0 0 1.4em}.message-description{font-family:\"Space Mono\";line-height:42px;font-size:15px;letter-spacing:.15rem;padding:0 20px;max-width:600px;margin:auto}.links{max-width:600px;margin:40px auto 0;text-align:center}.links a{width:170px;display:inline-block;padding:15px 0;margin:0 15px;border:1px solid #000;color:#000;text-decoration:none;font-family:\"Space Mono\";text-transform:uppercase;font-size:11px;letter-spacing:.1rem;position:relative}.links a:before{content:\"\";height:42px;background:#000;position:absolute;top:0;right:0;width:0;transition:all 
.3s}.links a:after{transition:all .3s;z-index:999;position:relative;content:\"back to hompage\"}.links a:hover:before{width:170px}.links a:hover:after{color:#fff}.links a:nth-child(2){background:#fff;color:#000}.links a:nth-child(2):before{background:#212121;left:0}.links a:nth-child(2):after{content:\"report error\"}.links a:nth-child(2):hover:after{color:#fff}.social{position:absolute;bottom:15px;left:15px}.social-list{margin:0;padding:0;list-style-type:none}.social-list li{display:inline-block;margin:5px 10px}.social-list li a{color:#000}@media (max-width:480px){.message-heading{font-size:1rem;margin-bottom:30px}.message-description{font-size:.7rem;line-height:2rem}.links a{margin:10px;width:280px}.social{left:50%;margin-left:-55px}}\ndiff --git a/website/css/main.min.css b/website/css/main.min.css\nindex 818002f..e69de29 100644\n--- a/website/css/main.min.css\n+++ b/website/css/main.min.css\n@@ -1 +0,0 @@\n-@font-face{font-family:Polya;src:url(https://raw.githubusercontent.com/matteobruni/tsparticles/gh-pages/fonts/Polya.otf)}html{height:100%;overflow:hidden}body{line-height:1;height:100%;overflow:hidden;background:#000}.github{bottom:10px;right:10px;padding:0 12px 6px;position:fixed;border-radius:10px;background:#fff;border:1px solid #000}.github a{color:#000}.github a:active,.github a:hover,.github a:link,.github a:visited{color:#000;text-decoration:none}.github img{height:30px}.github #gh-project{font-size:20px;padding-left:5px;font-weight:700;vertical-align:bottom}.toggle-sidebar{top:50%;left:0;font-size:20px;color:#000;position:absolute;padding:3px;border-top-right-radius:5px;border-bottom-right-radius:5px;background:#e7e7e7;border:1px solid #000;border-left:none}#editor{background:#fff}[hidden]{display:none}#repulse-div{width:200px;height:200px;background-color:rgba(255,255,255,.5);border-radius:100px;position:absolute;top:50%;left:50%;margin-left:-100px;margin-top:-100px;z-index:200}@media (min-width:1600px) and (-webkit-device-pixel-ratio:1){.col-xxl-3{-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}}.btn-react{color:#fff;background-color:#61dafb;border-color:#fff}.btn-react:hover{color:#fff;background-color:#5aa3c4;border-color:#ccc}.btn-react.focus,.btn-react:focus{color:#fff;background-color:#5aa3c4;border-color:#ccc;box-shadow:0 0 0 .2rem rgba(90,163,196,.5)}.btn-react.disabled,.btn-react:disabled{color:#fff;background-color:#61dafb;border-color:#ccc}.btn-react:not(:disabled):not(.disabled).active,.btn-react:not(:disabled):not(.disabled):active,.show>.btn-react.dropdown-toggle{color:#fff;background-color:#5aa3c4;border-color:#ccc}.btn-react:not(:disabled):not(.disabled).active:focus,.btn-react:not(:disabled):not(.disabled):active:focus,.show>.btn-react.dropdown-toggle:focus{box-shadow:0 0 0 .2rem rgba(90,163,196,.5)}#stats,.count-particles{-webkit-user-select:none}#stats{overflow:hidden}#stats-graph canvas{border-radius:3px 3px 0 0}.count-particles{border-radius:0 0 3px 3px}\ndiff --git a/website/css/presets.min.css b/website/css/presets.min.css\nindex 6c2ae2c..e69de29 100644\n--- a/website/css/presets.min.css\n+++ b/website/css/presets.min.css\n@@ -1 +0,0 @@\n-html{height:100%;overflow:hidden}body{line-height:1;height:100%;overflow:hidden}\n", "diff --git a/storage/wal/verifier_test.go b/storage/wal/verifier_test.go\nindex 61e1536..a44755f 100644\n--- a/storage/wal/verifier_test.go\n+++ b/storage/wal/verifier_test.go\n@@ -138,22 +138,13 @@ func writeCorruptEntries(file *os.File, t *testing.T, n int) {\n \t\t}\n \t}\n \n-\n \t// Write some random bytes to the file to simulate 
corruption.\n \tif _, err := file.Write(corruption); err != nil {\n \t\tfatal(t, \"corrupt WAL segment\", err)\n \t}\n-\tcorrupt := []byte{1, 255, 0, 3, 45, 26, 110}\n-\n-\twrote, err := file.Write(corrupt)\n-\tif err != nil {\n-\t\tt.Fatal(err)\n-\t} else if wrote != len(corrupt) {\n-\t\tt.Fatal(\"Error writing corrupt data to file\")\n-\t}\n \n \tif err := file.Close(); err != nil {\n-\t\tt.Fatalf(\"Error: filed to close file: %v\\n\", err)\n+\t\tt.Fatalf(\"Error: failed to close file: %v\\n\", err)\n \t}\n }\n \n", "diff --git a/testnet/stacks-node/src/config.rs b/testnet/stacks-node/src/config.rs\nindex 24ca06c..d80f721 100644\n--- a/testnet/stacks-node/src/config.rs\n+++ b/testnet/stacks-node/src/config.rs\n@@ -1414,8 +1414,8 @@ impl MinerConfig {\n pub fn default() -> MinerConfig {\n MinerConfig {\n min_tx_fee: 1,\n- first_attempt_time_ms: 1_000,\n- subsequent_attempt_time_ms: 30_000,\n+ first_attempt_time_ms: 5_000,\n+ subsequent_attempt_time_ms: 180_000,\n microblock_attempt_time_ms: 30_000,\n probability_pick_no_estimate_tx: 5,\n }\n", "diff --git a/docs/fiddles/system/system-information/get-version-information/index.html b/docs/fiddles/system/system-information/get-version-information/index.html\nnew file mode 100644\nindex 0000000..0867bc3\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/index.html\n@@ -0,0 +1,26 @@\n+<!DOCTYPE html>\n+<html>\n+ <head>\n+ <meta charset=\"UTF-8\">\n+ </head>\n+ <body>\n+ <div>\n+ <div>\n+ <h1>Get version information</h1>\n+ <i>Supports: Win, macOS, Linux <span>|</span> Process: Both</i>\n+ <div>\n+ <div>\n+ <button id=\"version-info\">View Demo</button>\n+ <span id=\"got-version-info\"></span>\n+ </div>\n+ <p>The <code>process</code> module is built into Node.js (therefore you can use this in both the main and renderer processes) and in Electron apps this object has a few more useful properties on it.</p>\n+ <p>The example below gets the version of Electron in use by the app.</p>\n+ <p>See the <a href=\"http://electron.atom.io/docs/api/process\">process documentation <span>(opens in new window)</span></a> for more.</p>\n+ </div>\n+ </div>\n+ </div>\n+ </body>\n+ <script>\n+ require('./renderer.js')\n+ </script>\n+</html>\ndiff --git a/docs/fiddles/system/system-information/get-version-information/main.js b/docs/fiddles/system/system-information/get-version-information/main.js\nnew file mode 100644\nindex 0000000..1f9f917\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/main.js\n@@ -0,0 +1,25 @@\n+const { app, BrowserWindow } = require('electron')\n+\n+let mainWindow = null\n+\n+function createWindow () {\n+ const windowOptions = {\n+ width: 600,\n+ height: 400,\n+ title: 'Get version information',\n+ webPreferences: {\n+ nodeIntegration: true\n+ }\n+ }\n+\n+ mainWindow = new BrowserWindow(windowOptions)\n+ mainWindow.loadFile('index.html')\n+\n+ mainWindow.on('closed', () => {\n+ mainWindow = null\n+ })\n+}\n+\n+app.on('ready', () => {\n+ createWindow()\n+})\ndiff --git a/docs/fiddles/system/system-information/get-version-information/renderer.js b/docs/fiddles/system/system-information/get-version-information/renderer.js\nnew file mode 100644\nindex 0000000..40f7f2c\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/renderer.js\n@@ -0,0 +1,8 @@\n+const versionInfoBtn = document.getElementById('version-info')\n+\n+const electronVersion = process.versions.electron\n+\n+versionInfoBtn.addEventListener('click', () => {\n+ const message = `This 
app is using Electron version: ${electronVersion}`\n+ document.getElementById('got-version-info').innerHTML = message\n+})\n"]
5
["6a63a9d439e18b6b8483abdf19162f476fcf8563", "9acf7a062ee9c0538c2cd4661c1f5da61ab06316", "fba4326c72fc22d81aba6976a9fef1e4b6154fd9", "d35d302cadf355a169dca6636597183de6bbee23", "16d4ace80096557fb3fd48396aa09107241c3131"]
["feat", "build", "refactor", "fix", "docs"]
licensing,enable performance test trigger This reverts commit 146c7b58154a5b3de957f87e3b193447e0576547.,use trait objects for from_str Use `Box<dyn error::Error>` to allow solutions to use `?` to propagate errors.,removing automatic page push on nav,also make dependents when running smoke tests
["diff --git a/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java b/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\nindex a4aee6b..bb523fa 100644\n--- a/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\n+++ b/atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java\n@@ -1,3 +1,18 @@\n+/*\n+ * Copyright \u00a9 2020 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n package io.atomix.cluster.messaging.impl;\n \n import static org.assertj.core.api.Assertions.assertThat;\n", "diff --git a/Jenkinsfile b/Jenkinsfile\nindex 399f8b8..c3f8fde 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -120,6 +120,12 @@ pipeline {\n }\n }\n \n+ stage('Trigger Performance Tests') {\n+ when { branch 'develop' }\n+ steps {\n+ build job: 'zeebe-cluster-performance-tests', wait: false\n+ }\n+ }\n }\n \n post {\n", "diff --git a/exercises/conversions/from_str.rs b/exercises/conversions/from_str.rs\nindex 41fccd7..4beebac 100644\n--- a/exercises/conversions/from_str.rs\n+++ b/exercises/conversions/from_str.rs\n@@ -2,6 +2,7 @@\n // Additionally, upon implementing FromStr, you can use the `parse` method\n // on strings to generate an object of the implementor type.\n // You can read more about it at https://doc.rust-lang.org/std/str/trait.FromStr.html\n+use std::error;\n use std::str::FromStr;\n \n #[derive(Debug)]\n@@ -23,7 +24,7 @@ struct Person {\n // If everything goes well, then return a Result of a Person object\n \n impl FromStr for Person {\n- type Err = String;\n+ type Err = Box<dyn error::Error>;\n fn from_str(s: &str) -> Result<Person, Self::Err> {\n }\n }\ndiff --git a/info.toml b/info.toml\nindex 2068750..4a1d3aa 100644\n--- a/info.toml\n+++ b/info.toml\n@@ -884,5 +884,5 @@ path = \"exercises/conversions/from_str.rs\"\n mode = \"test\"\n hint = \"\"\"\n The implementation of FromStr should return an Ok with a Person object,\n-or an Err with a string if the string is not valid.\n+or an Err with an error if the string is not valid.\n This is almost like the `try_from_into` exercise.\"\"\"\n", "diff --git a/ionic/components/nav/test/basic/index.ts b/ionic/components/nav/test/basic/index.ts\nindex 4b1a8ea..2834f68 100644\n--- a/ionic/components/nav/test/basic/index.ts\n+++ b/ionic/components/nav/test/basic/index.ts\n@@ -63,12 +63,6 @@ class FirstPage {\n }\n }\n \n- onPageDidEnter() {\n- setTimeout(() => {\n- this.nav.push(PrimaryHeaderPage);\n- }, 1000);\n- }\n-\n setPages() {\n let items = [\n PrimaryHeaderPage\n", "diff --git a/.github/workflows/os-smoke-test.yml b/.github/workflows/os-smoke-test.yml\nindex 194d108..7e41493 100644\n--- a/.github/workflows/os-smoke-test.yml\n+++ b/.github/workflows/os-smoke-test.yml\n@@ -56,5 +56,7 @@ jobs:\n uses: JesseTG/[email protected]\n with:\n path: /Users/runner/.m2/repository/uk/co/real-logic/sbe-tool\n+ - name: Build 
relevant modules\n+ run: mvn -B -am -pl qa/integration-tests package -DskipTests -DskipChecks -T1C\n - name: Run smoke test\n run: mvn -B -pl qa/integration-tests verify -P smoke-test -DskipUTs -DskipChecks\n"]
5
["cbe62140ce219da84772e21e7cfb4b5c2a25c1b8", "80944b7a513b442afcb2d0d6c7d71c0d79365dba", "c3e7b831786c9172ed8bd5d150f3c432f242fba9", "cd9e6a2ab17c5961b0f977bb8a06f8545da49a97", "2236b37bd671fdb71313cbc6ebd7633f0effba34"]
["docs", "ci", "fix", "test", "build"]
add react ecosystem,coordinator accepts a request transformer instead of a list of operations The request transformer can generate the operations from the current topology. This helps to - ensure that the operations are generated based on the latest topology. When concurrent changes happen, the coordinator can detect them. Previously this was unclear because, by the time the apply operations were handled, the cluster topology might have changed. - return the simulated final topology as part of the result,i18n for Time Picker,create mock img server,add documentation to use react-native-paper with CRA (#874)
["diff --git a/package.json b/package.json\nindex 1ba8c4f..d1de9a0 100644\n--- a/package.json\n+++ b/package.json\n@@ -36,14 +36,19 @@\n \"@types/node\": \"^9.3.0\",\n \"@types/react\": \"^16.0.34\",\n \"@types/react-dom\": \"^16.0.3\",\n+ \"@types/react-motion\": \"^0.0.25\",\n \"bootstrap-sass\": \"^3.3.7\",\n \"highcharts\": \"^6.0.4\",\n \"html2canvas\": \"^1.0.0-alpha.9\",\n+ \"immer\": \"^1.2.1\",\n \"lodash\": \"^4.17.4\",\n \"moment\": \"^2.20.1\",\n \"normalize.css\": \"^8.0.0\",\n- \"react\": \"^16.2.0\",\n- \"react-dom\": \"^16.2.0\",\n+ \"react\": \"^16.3.1\",\n+ \"react-dom\": \"^16.3.1\",\n+ \"react-motion\": \"^0.5.2\",\n+ \"react-redux\": \"^5.0.7\",\n+ \"redux\": \"^3.7.2\",\n \"rxjs\": \"^5.5.6\",\n \"vue\": \"^2.5.13\",\n \"vue-plugin-webextension-i18n\": \"^0.1.0\",\ndiff --git a/yarn.lock b/yarn.lock\nindex c8898d8..5d0fc9f 100644\n--- a/yarn.lock\n+++ b/yarn.lock\n@@ -187,6 +187,12 @@\n \"@types/node\" \"*\"\n \"@types/react\" \"*\"\n \n+\"@types/react-motion@^0.0.25\":\n+ version \"0.0.25\"\n+ resolved \"https://registry.npmjs.org/@types/react-motion/-/react-motion-0.0.25.tgz#2445745ee8e8e6149faa47a36ff6b0d4c21dbf94\"\n+ dependencies:\n+ \"@types/react\" \"*\"\n+\n \"@types/react@*\", \"@types/react@^16.0.34\":\n version \"16.0.40\"\n resolved \"https://registry.npmjs.org/@types/react/-/react-16.0.40.tgz#caabc2296886f40b67f6fc80f0f3464476461df9\"\n@@ -3837,6 +3843,10 @@ [email protected]:\n version \"4.2.1\"\n resolved \"https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz#9634502aa12c445dd5a7c5734b572bb8738aacbb\"\n \n+hoist-non-react-statics@^2.5.0:\n+ version \"2.5.0\"\n+ resolved \"https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.0.tgz#d2ca2dfc19c5a91c5a6615ce8e564ef0347e2a40\"\n+\n home-or-tmp@^2.0.0:\n version \"2.0.0\"\n resolved \"https://registry.npmjs.org/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8\"\n@@ -4004,6 +4014,10 @@ ignore@^3.3.5:\n version \"3.3.7\"\n resolved \"https://registry.npmjs.org/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021\"\n \n+immer@^1.2.1:\n+ version \"1.2.1\"\n+ resolved \"https://registry.npmjs.org/immer/-/immer-1.2.1.tgz#96e2ae29cdfc428f28120b832701931b92fa597c\"\n+\n import-local@^1.0.0:\n version \"1.0.0\"\n resolved \"https://registry.npmjs.org/import-local/-/import-local-1.0.0.tgz#5e4ffdc03f4fe6c009c6729beb29631c2f8227bc\"\n@@ -4104,7 +4118,7 @@ interpret@^1.0.0:\n version \"1.1.0\"\n resolved \"https://registry.npmjs.org/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614\"\n \n-invariant@^2.2.2:\n+invariant@^2.0.0, invariant@^2.2.2:\n version \"2.2.4\"\n resolved \"https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6\"\n dependencies:\n@@ -5040,6 +5054,10 @@ locate-path@^2.0.0:\n p-locate \"^2.0.0\"\n path-exists \"^3.0.0\"\n \n+lodash-es@^4.17.5, lodash-es@^4.2.1:\n+ version \"4.17.8\"\n+ resolved \"https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.8.tgz#6fa8c8c5d337481df0bdf1c0d899d42473121e45\"\n+\n lodash._reinterpolate@~3.0.0:\n version \"3.0.0\"\n resolved \"https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d\"\n@@ -5149,7 +5167,7 @@ [email protected]:\n version \"4.17.2\"\n resolved \"https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz#34a3055babe04ce42467b607d700072c7ff6bf42\"\n \[email protected], lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.16.3, lodash@^4.17.2, lodash@^4.17.3, 
lodash@^4.17.4, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@~4.17.4:\[email protected], lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.16.3, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@~4.17.4:\n version \"4.17.5\"\n resolved \"https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511\"\n \n@@ -6467,7 +6485,7 @@ promise@^7.1.1:\n dependencies:\n asap \"~2.0.3\"\n \n-prop-types@^15.6.0:\n+prop-types@^15.5.8, prop-types@^15.6.0:\n version \"15.6.1\"\n resolved \"https://registry.npmjs.org/prop-types/-/prop-types-15.6.1.tgz#36644453564255ddda391191fb3a125cbdf654ca\"\n dependencies:\n@@ -6574,7 +6592,7 @@ quick-lru@^1.0.0:\n version \"1.1.0\"\n resolved \"https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz#4360b17c61136ad38078397ff11416e186dcfbb8\"\n \[email protected]:\[email protected], raf@^3.1.0:\n version \"3.4.0\"\n resolved \"https://registry.npmjs.org/raf/-/raf-3.4.0.tgz#a28876881b4bc2ca9117d4138163ddb80f781575\"\n dependencies:\n@@ -6645,9 +6663,9 @@ react-dev-utils@^5.0.0:\n strip-ansi \"3.0.1\"\n text-table \"0.2.0\"\n \n-react-dom@^16.2.0:\n- version \"16.2.0\"\n- resolved \"https://registry.npmjs.org/react-dom/-/react-dom-16.2.0.tgz#69003178601c0ca19b709b33a83369fe6124c044\"\n+react-dom@^16.3.1:\n+ version \"16.3.1\"\n+ resolved \"https://registry.npmjs.org/react-dom/-/react-dom-16.3.1.tgz#6a3c90a4fb62f915bdbcf6204422d93a7d4ca573\"\n dependencies:\n fbjs \"^0.8.16\"\n loose-envify \"^1.1.0\"\n@@ -6658,9 +6676,28 @@ react-error-overlay@^4.0.0:\n version \"4.0.0\"\n resolved \"https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-4.0.0.tgz#d198408a85b4070937a98667f500c832f86bd5d4\"\n \n-react@^16.2.0:\n- version \"16.2.0\"\n- resolved \"https://registry.npmjs.org/react/-/react-16.2.0.tgz#a31bd2dab89bff65d42134fa187f24d054c273ba\"\n+react-motion@^0.5.2:\n+ version \"0.5.2\"\n+ resolved \"https://registry.npmjs.org/react-motion/-/react-motion-0.5.2.tgz#0dd3a69e411316567927917c6626551ba0607316\"\n+ dependencies:\n+ performance-now \"^0.2.0\"\n+ prop-types \"^15.5.8\"\n+ raf \"^3.1.0\"\n+\n+react-redux@^5.0.7:\n+ version \"5.0.7\"\n+ resolved \"https://registry.npmjs.org/react-redux/-/react-redux-5.0.7.tgz#0dc1076d9afb4670f993ffaef44b8f8c1155a4c8\"\n+ dependencies:\n+ hoist-non-react-statics \"^2.5.0\"\n+ invariant \"^2.0.0\"\n+ lodash \"^4.17.5\"\n+ lodash-es \"^4.17.5\"\n+ loose-envify \"^1.1.0\"\n+ prop-types \"^15.6.0\"\n+\n+react@^16.3.1:\n+ version \"16.3.1\"\n+ resolved \"https://registry.npmjs.org/react/-/react-16.3.1.tgz#4a2da433d471251c69b6033ada30e2ed1202cfd8\"\n dependencies:\n fbjs \"^0.8.16\"\n loose-envify \"^1.1.0\"\n@@ -6788,6 +6825,15 @@ reduce-function-call@^1.0.1:\n dependencies:\n balanced-match \"^0.4.2\"\n \n+redux@^3.7.2:\n+ version \"3.7.2\"\n+ resolved \"https://registry.npmjs.org/redux/-/redux-3.7.2.tgz#06b73123215901d25d065be342eb026bc1c8537b\"\n+ dependencies:\n+ lodash \"^4.2.1\"\n+ lodash-es \"^4.2.1\"\n+ loose-envify \"^1.1.0\"\n+ symbol-observable \"^1.0.3\"\n+\n regenerate@^1.2.1:\n version \"1.3.3\"\n resolved \"https://registry.npmjs.org/regenerate/-/regenerate-1.3.3.tgz#0c336d3980553d755c39b586ae3b20aa49c82b7f\"\n@@ -7811,6 +7857,10 @@ [email protected]:\n version \"1.0.1\"\n resolved \"https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4\"\n \n+symbol-observable@^1.0.3:\n+ version \"1.2.0\"\n+ resolved 
\"https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804\"\n+\n symbol-tree@^3.2.2:\n version \"3.2.2\"\n resolved \"https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6\"\n", "diff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\nindex 8bb5c3d..f8f5e24 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n@@ -10,6 +10,7 @@ package io.camunda.zeebe.topology.changes;\n import io.camunda.zeebe.scheduler.future.ActorFuture;\n import io.camunda.zeebe.topology.state.ClusterTopology;\n import io.camunda.zeebe.topology.state.TopologyChangeOperation;\n+import io.camunda.zeebe.util.Either;\n import java.util.List;\n \n public interface TopologyChangeCoordinator {\n@@ -39,4 +40,16 @@ public interface TopologyChangeCoordinator {\n ActorFuture<Boolean> hasCompletedChanges(final long version);\n \n ActorFuture<ClusterTopology> getCurrentTopology();\n+\n+ ActorFuture<TopologyChangeResult> applyOperations(TopologyChangeRequest request);\n+\n+ record TopologyChangeResult(\n+ ClusterTopology currentTopology,\n+ ClusterTopology finalTopology,\n+ List<TopologyChangeOperation> operations) {}\n+\n+ interface TopologyChangeRequest {\n+ Either<Exception, List<TopologyChangeOperation>> operations(\n+ final ClusterTopology currentTopology);\n+ }\n }\ndiff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\nindex 13ec754..877fc3c 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n@@ -103,6 +103,62 @@ public class TopologyChangeCoordinatorImpl implements TopologyChangeCoordinator \n return clusterTopologyManager.getClusterTopology();\n }\n \n+ @Override\n+ public ActorFuture<TopologyChangeResult> applyOperations(final TopologyChangeRequest request) {\n+ final ActorFuture<TopologyChangeResult> future = executor.createFuture();\n+ clusterTopologyManager\n+ .getClusterTopology()\n+ .onComplete(\n+ (currentClusterTopology, errorOnGettingTopology) -> {\n+ if (errorOnGettingTopology != null) {\n+ future.completeExceptionally(errorOnGettingTopology);\n+ return;\n+ }\n+\n+ final var operationsEither = request.operations(currentClusterTopology);\n+ if (operationsEither.isLeft()) {\n+ future.completeExceptionally(operationsEither.getLeft());\n+ return;\n+ }\n+ final var operations = operationsEither.get();\n+ if (operations.isEmpty()) {\n+ // No operations to apply\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, currentClusterTopology, operations));\n+ return;\n+ }\n+\n+ final ActorFuture<ClusterTopology> validation =\n+ validateTopologyChangeRequest(currentClusterTopology, operations);\n+\n+ validation.onComplete(\n+ (simulatedFinalTopology, validationError) -> {\n+ if (validationError != null) {\n+ future.completeExceptionally(validationError);\n+ return;\n+ }\n+\n+ // if the validation was successful, apply the changes\n+ final ActorFuture<ClusterTopology> applyFuture = executor.createFuture();\n+ 
applyTopologyChange(\n+ operations, currentClusterTopology, simulatedFinalTopology, applyFuture);\n+\n+ applyFuture.onComplete(\n+ (ignore, error) -> {\n+ if (error == null) {\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, simulatedFinalTopology, operations));\n+ } else {\n+ future.completeExceptionally(error);\n+ }\n+ });\n+ });\n+ });\n+ return future;\n+ }\n+\n private ActorFuture<ClusterTopology> validateTopologyChangeRequest(\n final ClusterTopology currentClusterTopology,\n final List<TopologyChangeOperation> operations) {\n", "diff --git a/packages/nc-gui/components/cell/TimePicker.vue b/packages/nc-gui/components/cell/TimePicker.vue\nindex 619ab45..7f66828 100644\n--- a/packages/nc-gui/components/cell/TimePicker.vue\n+++ b/packages/nc-gui/components/cell/TimePicker.vue\n@@ -38,6 +38,8 @@ const isTimeInvalid = ref(false)\n \n const dateFormat = isMysql(column.value.base_id) ? 'YYYY-MM-DD HH:mm:ss' : 'YYYY-MM-DD HH:mm:ssZ'\n \n+const { t } = useI18n()\n+\n const localState = computed({\n get() {\n if (!modelValue) {\n@@ -89,11 +91,11 @@ watch(\n \n const placeholder = computed(() => {\n if (isEditColumn.value && (modelValue === '' || modelValue === null)) {\n- return '(Optional)'\n+ return t('labels.optional')\n } else if (modelValue === null && showNull.value) {\n- return 'NULL'\n+ return t('general.null')\n } else if (isTimeInvalid.value) {\n- return 'Invalid time'\n+ return t('msg.invalidTime')\n } else {\n return ''\n }\n", "diff --git a/scripts/gulp/tasks/test.ts b/scripts/gulp/tasks/test.ts\nindex 8014b12..d10c1aa 100644\n--- a/scripts/gulp/tasks/test.ts\n+++ b/scripts/gulp/tasks/test.ts\n@@ -26,12 +26,18 @@ task('test.imageserver', () => {\n function handleRequest(req, res) {\n const urlParse = url.parse(req.url, true);\n \n+ res.setHeader('Access-Control-Allow-Origin', '*');\n+ res.setHeader('Access-Control-Allow-Methods', 'GET');\n+ res.setHeader('Connection', 'keep-alive');\n+ res.setHeader('Age', '0');\n+ res.setHeader('cache-control', 'no-store');\n+\n if (urlParse.pathname === '/reset') {\n console.log('Image Server Reset');\n console.log('---------------------------');\n requestedUrls.length = 0;\n start = Date.now();\n- res.setHeader('Access-Control-Allow-Origin', '*');\n+ res.setHeader('Content-Type', 'text/plain');\n res.end('reset');\n return;\n }\n@@ -48,9 +54,8 @@ task('test.imageserver', () => {\n \n setTimeout(() => {\n res.setHeader('Content-Type', 'image/svg+xml');\n- res.setHeader('Access-Control-Allow-Origin', '*');\n res.end(`<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n- style=\"background-color: ${color}; width: ${width}px; height: ${height}px;\">\n+ viewBox=\"0 0 ${width} ${height}\" style=\"background-color: ${color};\">\n <text x=\"5\" y=\"22\" style=\"font-family: Courier; font-size: 24px\">${id}</text>\n </svg>`);\n }, delay);\n", "diff --git a/docs/pages/4.react-native-web.md b/docs/pages/4.react-native-web.md\nindex 69e4e52..8d6ae2a 100644\n--- a/docs/pages/4.react-native-web.md\n+++ b/docs/pages/4.react-native-web.md\n@@ -16,6 +16,63 @@ To install `react-native-web`, run:\n yarn add react-native-web react-dom react-art\n ```\n \n+### Using CRA ([Create React App](https://github.com/facebook/create-react-app))\n+\n+Install [`react-app-rewired`](https://github.com/timarney/react-app-rewired) to override `webpack` configuration:\n+\n+```sh\n+yarn add --dev react-app-rewired\n+```\n+\n+[Configure `babel-loader`](#2-configure-babel-loader) using a new file called 
`config-overrides.js`:\n+\n+```js\n+module.exports = function override(config, env) {\n+ config.module.rules.push({\n+ test: /\\.js$/,\n+ exclude: /node_modules[/\\\\](?!react-native-paper|react-native-vector-icons|react-native-safe-area-view)/,\n+ use: {\n+ loader: \"babel-loader\",\n+ options: {\n+ // Disable reading babel configuration\n+ babelrc: false,\n+ configFile: false,\n+\n+ // The configration for compilation\n+ presets: [\n+ [\"@babel/preset-env\", { useBuiltIns: \"usage\" }],\n+ \"@babel/preset-react\",\n+ \"@babel/preset-flow\"\n+ ],\n+ plugins: [\n+ \"@babel/plugin-proposal-class-properties\",\n+ \"@babel/plugin-proposal-object-rest-spread\"\n+ ]\n+ }\n+ }\n+ });\n+\n+ return config;\n+};\n+```\n+\n+Change your script in `package.json`:\n+\n+```diff\n+/* package.json */\n+\n+ \"scripts\": {\n+- \"start\": \"react-scripts start\",\n++ \"start\": \"react-app-rewired start\",\n+- \"build\": \"react-scripts build\",\n++ \"build\": \"react-app-rewired build\",\n+- \"test\": \"react-scripts test --env=jsdom\",\n++ \"test\": \"react-app-rewired test --env=jsdom\"\n+}\n+```\n+\n+### Custom webpack setup\n+\n To install `webpack`, run:\n \n ```sh\n"]
5
["7e04a5e829d7416e312ac342a00a11787745753b", "dec860436916ef216998f80f8b2f9c39d00c064d", "48806e3675c7b18327e7629827454d7c29be25a9", "32b76173a259ea1993298289b436cf10c1e800bf", "ee7cc5d5a940fba774e715b1f029c6361110b108"]
["build", "feat", "fix", "test", "docs"]
apply element migrated events This is a very straightforward event applier. All it needs to do is update the persisted data for the element instance using the data in the event.,i18n for Time Picker,fix node test cases running into an infinite loop,remove unused branches and ignore envrc file,replace tuple with record
["diff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\nindex da05e13..9231df3 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n@@ -154,6 +154,9 @@ public final class EventAppliers implements EventApplier {\n register(\n ProcessInstanceIntent.SEQUENCE_FLOW_TAKEN,\n new ProcessInstanceSequenceFlowTakenApplier(elementInstanceState, processState));\n+ register(\n+ ProcessInstanceIntent.ELEMENT_MIGRATED,\n+ new ProcessInstanceElementMigratedApplier(elementInstanceState));\n }\n \n private void registerProcessInstanceCreationAppliers(final MutableProcessingState state) {\ndiff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\nindex e5a0f3a..d38358f 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n@@ -24,5 +24,16 @@ final class ProcessInstanceElementMigratedApplier\n }\n \n @Override\n- public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {}\n+ public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {\n+ elementInstanceState.updateInstance(\n+ elementInstanceKey,\n+ elementInstance ->\n+ elementInstance\n+ .getValue()\n+ .setProcessDefinitionKey(value.getProcessDefinitionKey())\n+ .setBpmnProcessId(value.getBpmnProcessId())\n+ .setVersion(value.getVersion())\n+ .setElementId(value.getElementId())\n+ .setFlowScopeKey(value.getFlowScopeKey()));\n+ }\n }\n", "diff --git a/packages/nc-gui/components/cell/TimePicker.vue b/packages/nc-gui/components/cell/TimePicker.vue\nindex 619ab45..7f66828 100644\n--- a/packages/nc-gui/components/cell/TimePicker.vue\n+++ b/packages/nc-gui/components/cell/TimePicker.vue\n@@ -38,6 +38,8 @@ const isTimeInvalid = ref(false)\n \n const dateFormat = isMysql(column.value.base_id) ? 
'YYYY-MM-DD HH:mm:ss' : 'YYYY-MM-DD HH:mm:ssZ'\n \n+const { t } = useI18n()\n+\n const localState = computed({\n get() {\n if (!modelValue) {\n@@ -89,11 +91,11 @@ watch(\n \n const placeholder = computed(() => {\n if (isEditColumn.value && (modelValue === '' || modelValue === null)) {\n- return '(Optional)'\n+ return t('labels.optional')\n } else if (modelValue === null && showNull.value) {\n- return 'NULL'\n+ return t('general.null')\n } else if (isTimeInvalid.value) {\n- return 'Invalid time'\n+ return t('msg.invalidTime')\n } else {\n return ''\n }\n", "diff --git a/packages/designer/tests/document/node/node.test.ts b/packages/designer/tests/document/node/node.test.ts\nindex dd20bd3..113360d 100644\n--- a/packages/designer/tests/document/node/node.test.ts\n+++ b/packages/designer/tests/document/node/node.test.ts\n@@ -26,7 +26,7 @@ import rootHeaderMetadata from '../../fixtures/component-metadata/root-header';\n import rootContentMetadata from '../../fixtures/component-metadata/root-content';\n import rootFooterMetadata from '../../fixtures/component-metadata/root-footer';\n \n-describe.skip('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n+describe('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n let editor: Editor;\n let designer: Designer;\n let project: Project;\n@@ -474,15 +474,16 @@ describe.skip('Node \u65b9\u6cd5\u6d4b\u8bd5', () => {\n it('didDropIn / didDropOut', () => {\n const form = doc.getNode('node_k1ow3cbo');\n designer.createComponentMeta(divMetadata);\n+ designer.createComponentMeta(formMetadata);\n const callbacks = form.componentMeta.getMetadata().configure.advanced?.callbacks;\n const fn1 = callbacks.onNodeAdd = jest.fn();\n const fn2 = callbacks.onNodeRemove = jest.fn();\n const textField = doc.getNode('node_k1ow3cc9');\n form.didDropIn(textField);\n- expect(fn1).toHaveBeenCalledWith(textField, form);\n+ expect(fn1).toHaveBeenCalledWith(textField.internalToShellNode(), form.internalToShellNode());\n \n form.didDropOut(textField);\n- expect(fn2).toHaveBeenCalledWith(textField, form);\n+ expect(fn2).toHaveBeenCalledWith(textField.internalToShellNode(), form.internalToShellNode());\n });\n \n it('hover', () => {\n", "diff --git a/.github/workflows/ibis-backends-cloud.yml b/.github/workflows/ibis-backends-cloud.yml\nindex 2003e8e..7c7fd26 100644\n--- a/.github/workflows/ibis-backends-cloud.yml\n+++ b/.github/workflows/ibis-backends-cloud.yml\n@@ -5,9 +5,12 @@ on:\n # Skip the backend suite if all changes are in the docs directory\n paths-ignore:\n - \"docs/**\"\n+ - \"**/*.md\"\n+ - \"**/*.qmd\"\n+ - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n- - quarto\n \n permissions:\n # this allows extractions/setup-just to list releases for `just` at a higher\ndiff --git a/.github/workflows/ibis-backends-skip-helper.yml b/.github/workflows/ibis-backends-skip-helper.yml\nindex 5d5f3f7..0471994 100644\n--- a/.github/workflows/ibis-backends-skip-helper.yml\n+++ b/.github/workflows/ibis-backends-skip-helper.yml\n@@ -9,20 +9,20 @@ on:\n - \"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n jobs:\n test_backends:\ndiff --git a/.github/workflows/ibis-backends.yml b/.github/workflows/ibis-backends.yml\nindex 4a1cae9..30e6c1a 100644\n--- a/.github/workflows/ibis-backends.yml\n+++ b/.github/workflows/ibis-backends.yml\n@@ -8,10 +8,10 @@ on:\n - 
\"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n # Skip the backend suite if all changes are docs\n paths-ignore:\n@@ -19,10 +19,10 @@ on:\n - \"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n permissions:\ndiff --git a/.github/workflows/ibis-docs-lint.yml b/.github/workflows/ibis-docs-lint.yml\nindex 1adda11..b528a30 100644\n--- a/.github/workflows/ibis-docs-lint.yml\n+++ b/.github/workflows/ibis-docs-lint.yml\n@@ -5,12 +5,10 @@ on:\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\ndiff --git a/.github/workflows/ibis-main-skip-helper.yml b/.github/workflows/ibis-main-skip-helper.yml\nindex a5fdc6f..0fb5dea 100644\n--- a/.github/workflows/ibis-main-skip-helper.yml\n+++ b/.github/workflows/ibis-main-skip-helper.yml\n@@ -8,19 +8,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n jobs:\n test_core:\ndiff --git a/.github/workflows/ibis-main.yml b/.github/workflows/ibis-main.yml\nindex aa31436..0b1536a 100644\n--- a/.github/workflows/ibis-main.yml\n+++ b/.github/workflows/ibis-main.yml\n@@ -7,20 +7,20 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n # Skip the test suite if all changes are in the docs directory\n paths-ignore:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n permissions:\ndiff --git a/.github/workflows/ibis-tpch-queries-skip-helper.yml b/.github/workflows/ibis-tpch-queries-skip-helper.yml\nindex 1f1c0bc..f10fb8d 100644\n--- a/.github/workflows/ibis-tpch-queries-skip-helper.yml\n+++ b/.github/workflows/ibis-tpch-queries-skip-helper.yml\n@@ -6,19 +6,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\ndiff --git a/.github/workflows/ibis-tpch-queries.yml b/.github/workflows/ibis-tpch-queries.yml\nindex b4f8a48..9e65a61 100644\n--- a/.github/workflows/ibis-tpch-queries.yml\n+++ b/.github/workflows/ibis-tpch-queries.yml\n@@ -6,19 +6,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths-ignore:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\ndiff --git a/.github/workflows/nix-skip-helper.yml b/.github/workflows/nix-skip-helper.yml\nindex 677b4d7..e0ab8f7 100644\n--- a/.github/workflows/nix-skip-helper.yml\n+++ b/.github/workflows/nix-skip-helper.yml\n@@ -9,19 +9,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n jobs:\ndiff --git a/.github/workflows/nix.yml 
b/.github/workflows/nix.yml\nindex f2dd3f0..7ea9e26 100644\n--- a/.github/workflows/nix.yml\n+++ b/.github/workflows/nix.yml\n@@ -6,19 +6,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths-ignore:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\n", "diff --git a/engine/src/main/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceProcessor.java b/engine/src/main/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceProcessor.java\nindex fa6f8d4..2185b1e 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceProcessor.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceProcessor.java\n@@ -37,7 +37,6 @@ import io.camunda.zeebe.protocol.record.intent.ProcessInstanceCreationIntent;\n import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n import io.camunda.zeebe.protocol.record.value.BpmnElementType;\n import io.camunda.zeebe.util.Either;\n-import io.camunda.zeebe.util.collection.Tuple;\n import java.util.Arrays;\n import java.util.HashMap;\n import java.util.Map;\n@@ -236,21 +235,22 @@ public final class CreateProcessInstanceProcessor\n return startInstructions.stream()\n .map(\n instruction ->\n- Tuple.of(\n+ new ElementIdAndType(\n instruction.getElementId(),\n process.getElementById(instruction.getElementIdBuffer()).getElementType()))\n- .filter(elementTuple -> UNSUPPORTED_ELEMENT_TYPES.contains(elementTuple.getRight()))\n+ .filter(\n+ elementIdAndType -> UNSUPPORTED_ELEMENT_TYPES.contains(elementIdAndType.elementType))\n .findAny()\n .map(\n- elementTypeTuple ->\n+ elementIdAndType ->\n Either.left(\n new Rejection(\n RejectionType.INVALID_ARGUMENT,\n (\"Expected to create instance of process with start instructions but the element with id '%s' targets unsupported element type '%s'. \"\n + \"Supported element types are: %s\")\n .formatted(\n- elementTypeTuple.getLeft(),\n- elementTypeTuple.getRight(),\n+ elementIdAndType.elementId,\n+ elementIdAndType.elementType,\n Arrays.stream(BpmnElementType.values())\n .filter(\n elementType ->\n@@ -493,4 +493,6 @@ public final class CreateProcessInstanceProcessor\n }\n \n record Rejection(RejectionType type, String reason) {}\n+\n+ record ElementIdAndType(String elementId, BpmnElementType elementType) {}\n }\n"]
5
["39d5d1cfe8d2210305df2c8fab4a4ae430732cf7", "48806e3675c7b18327e7629827454d7c29be25a9", "d2c3f0ba6f85b659b76636a91ea9ab2b5a95a720", "d0c6476df61b9c6ab07b87e1724ea7c5318595bb", "bb2ccc1a778452aebf233cf78b20f1f4bab4354b"]
["feat", "fix", "test", "ci", "refactor"]
add workingDirectory option to shell.openExternal() (#15065) Allows passing `workingDirectory` to the underlying `ShellExecuteW` API on Windows. The motivation is that by default `ShellExecute` would use the current working directory, which would get locked on Windows and can prevent autoUpdater from working correctly. We need to be able to specify a different `workingDirectory` to prevent this situation.,rework RaftCommittedEntryListener Iterate over RaftCommittedEntryListener and refactor the listener such that it serves the actual need. We have some services (to be specific the AsyncSnapshotDirector) which need the committed position and want to listen to new updates. In Raft we know which record we are committing and whether it was an application record, so we can pass this information through the listeners. This avoids passing in the whole IndexedRecord object and reduces the potential of running out of memory (OOM) because of keeping too much data in heap (when commit is not possible).,fix sonar integration,stop playing audio on panel close Closes #824,fix golden tests for aws_vpn_connection
["diff --git a/atom/browser/atom_browser_client.cc b/atom/browser/atom_browser_client.cc\nindex 97e5f26..df0774b 100644\n--- a/atom/browser/atom_browser_client.cc\n+++ b/atom/browser/atom_browser_client.cc\n@@ -611,7 +611,7 @@ void OnOpenExternal(const GURL& escaped_url, bool allowed) {\n #else\n escaped_url,\n #endif\n- true);\n+ platform_util::OpenExternalOptions());\n }\n \n void HandleExternalProtocolInUI(\ndiff --git a/atom/common/api/atom_api_shell.cc b/atom/common/api/atom_api_shell.cc\nindex 1323cd6..7c67c7a 100644\n--- a/atom/common/api/atom_api_shell.cc\n+++ b/atom/common/api/atom_api_shell.cc\n@@ -60,11 +60,12 @@ bool OpenExternal(\n const GURL& url,\n #endif\n mate::Arguments* args) {\n- bool activate = true;\n+ platform_util::OpenExternalOptions options;\n if (args->Length() >= 2) {\n- mate::Dictionary options;\n- if (args->GetNext(&options)) {\n- options.Get(\"activate\", &activate);\n+ mate::Dictionary obj;\n+ if (args->GetNext(&obj)) {\n+ obj.Get(\"activate\", &options.activate);\n+ obj.Get(\"workingDirectory\", &options.working_dir);\n }\n }\n \n@@ -72,13 +73,13 @@ bool OpenExternal(\n base::Callback<void(v8::Local<v8::Value>)> callback;\n if (args->GetNext(&callback)) {\n platform_util::OpenExternal(\n- url, activate,\n+ url, options,\n base::Bind(&OnOpenExternalFinished, args->isolate(), callback));\n return true;\n }\n }\n \n- return platform_util::OpenExternal(url, activate);\n+ return platform_util::OpenExternal(url, options);\n }\n \n #if defined(OS_WIN)\ndiff --git a/atom/common/platform_util.h b/atom/common/platform_util.h\nindex 6fd8405..6686a4f 100644\n--- a/atom/common/platform_util.h\n+++ b/atom/common/platform_util.h\n@@ -8,6 +8,7 @@\n #include <string>\n \n #include \"base/callback_forward.h\"\n+#include \"base/files/file_path.h\"\n #include \"build/build_config.h\"\n \n #if defined(OS_WIN)\n@@ -16,10 +17,6 @@\n \n class GURL;\n \n-namespace base {\n-class FilePath;\n-}\n-\n namespace platform_util {\n \n typedef base::Callback<void(const std::string&)> OpenExternalCallback;\n@@ -32,6 +29,11 @@ bool ShowItemInFolder(const base::FilePath& full_path);\n // Must be called from the UI thread.\n bool OpenItem(const base::FilePath& full_path);\n \n+struct OpenExternalOptions {\n+ bool activate = true;\n+ base::FilePath working_dir;\n+};\n+\n // Open the given external protocol URL in the desktop's default manner.\n // (For example, mailto: URLs in the default mail user agent.)\n bool OpenExternal(\n@@ -40,7 +42,7 @@ bool OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate);\n+ const OpenExternalOptions& options);\n \n // The asynchronous version of OpenExternal.\n void OpenExternal(\n@@ -49,7 +51,7 @@ void OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback);\n \n // Move a file to trash.\ndiff --git a/atom/common/platform_util_linux.cc b/atom/common/platform_util_linux.cc\nindex 63ee0bd..f17cbda 100644\n--- a/atom/common/platform_util_linux.cc\n+++ b/atom/common/platform_util_linux.cc\n@@ -80,7 +80,7 @@ bool OpenItem(const base::FilePath& full_path) {\n return XDGOpen(full_path.value(), false);\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n // Don't wait for exit, since we don't want to wait for the browser/email\n // client window to close before returning\n if (url.SchemeIs(\"mailto\"))\n@@ -90,10 +90,10 @@ bool OpenExternal(const GURL& url, bool activate) {\n }\n 
\n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? \"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? \"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_mac.mm b/atom/common/platform_util_mac.mm\nindex b83b1e1..4cda8bf 100644\n--- a/atom/common/platform_util_mac.mm\n+++ b/atom/common/platform_util_mac.mm\n@@ -139,16 +139,16 @@ bool OpenItem(const base::FilePath& full_path) {\n launchIdentifiers:NULL];\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n DCHECK([NSThread isMainThread]);\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (ns_url)\n- return OpenURL(ns_url, activate).empty();\n+ return OpenURL(ns_url, options.activate).empty();\n return false;\n }\n \n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (!ns_url) {\n@@ -157,13 +157,13 @@ void OpenExternal(const GURL& url,\n }\n \n __block OpenExternalCallback c = callback;\n- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),\n- ^{\n- __block std::string error = OpenURL(ns_url, activate);\n- dispatch_async(dispatch_get_main_queue(), ^{\n- c.Run(error);\n- });\n- });\n+ dispatch_async(\n+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n+ __block std::string error = OpenURL(ns_url, options.activate);\n+ dispatch_async(dispatch_get_main_queue(), ^{\n+ c.Run(error);\n+ });\n+ });\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_win.cc b/atom/common/platform_util_win.cc\nindex 34576be..5712200 100644\n--- a/atom/common/platform_util_win.cc\n+++ b/atom/common/platform_util_win.cc\n@@ -294,15 +294,18 @@ bool OpenItem(const base::FilePath& full_path) {\n return ui::win::OpenFileViaShell(full_path);\n }\n \n-bool OpenExternal(const base::string16& url, bool activate) {\n+bool OpenExternal(const base::string16& url,\n+ const OpenExternalOptions& options) {\n // Quote the input scheme to be sure that the command does not have\n // parameters unexpected by the external program. This url should already\n // have been escaped.\n base::string16 escaped_url = L\"\\\"\" + url + L\"\\\"\";\n+ auto working_dir = options.working_dir.value();\n \n- if (reinterpret_cast<ULONG_PTR>(ShellExecuteW(\n- NULL, L\"open\", escaped_url.c_str(), NULL, NULL, SW_SHOWNORMAL)) <=\n- 32) {\n+ if (reinterpret_cast<ULONG_PTR>(\n+ ShellExecuteW(nullptr, L\"open\", escaped_url.c_str(), nullptr,\n+ working_dir.empty() ? nullptr : working_dir.c_str(),\n+ SW_SHOWNORMAL)) <= 32) {\n // We fail to execute the call. We could display a message to the user.\n // TODO(nsylvain): we should also add a dialog to warn on errors. See\n // bug 1136923.\n@@ -312,10 +315,10 @@ bool OpenExternal(const base::string16& url, bool activate) {\n }\n \n void OpenExternal(const base::string16& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? \"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? 
\"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& path) {\ndiff --git a/docs/api/shell.md b/docs/api/shell.md\nindex a469f94..b38348a 100644\n--- a/docs/api/shell.md\n+++ b/docs/api/shell.md\n@@ -37,9 +37,10 @@ Open the given file in the desktop's default manner.\n ### `shell.openExternal(url[, options, callback])`\n \n * `url` String - Max 2081 characters on windows, or the function returns false.\n-* `options` Object (optional) _macOS_\n- * `activate` Boolean - `true` to bring the opened application to the\n- foreground. The default is `true`.\n+* `options` Object (optional)\n+ * `activate` Boolean (optional) - `true` to bring the opened application to the\n+ foreground. The default is `true`. _macOS_\n+ * `workingDirectory` String (optional) - The working directory. _Windows_\n * `callback` Function (optional) _macOS_ - If specified will perform the open asynchronously.\n * `error` Error\n \n", "diff --git a/atomix/cluster/src/main/java/io/atomix/raft/RaftApplicationEntryCommittedPositionListener.java b/atomix/cluster/src/main/java/io/atomix/raft/RaftApplicationEntryCommittedPositionListener.java\nnew file mode 100644\nindex 0000000..57c28a9\n--- /dev/null\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/RaftApplicationEntryCommittedPositionListener.java\n@@ -0,0 +1,31 @@\n+/*\n+ * Copyright 2016-present Open Networking Foundation\n+ * Copyright \u00a9 2020 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.atomix.raft;\n+\n+/**\n+ * This listener will only be called by the Leader, when it commits an application entry.\n+ *\n+ * <p>If RAFT is currently running in a follower role, it will not call this listener.\n+ */\n+@FunctionalInterface\n+public interface RaftApplicationEntryCommittedPositionListener {\n+\n+ /**\n+ * @param committedPosition the new committed position which is related to the application entries\n+ */\n+ void onCommit(long committedPosition);\n+}\ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/RaftCommittedEntryListener.java b/atomix/cluster/src/main/java/io/atomix/raft/RaftCommittedEntryListener.java\ndeleted file mode 100644\nindex 3d11d75..0000000\n--- a/atomix/cluster/src/main/java/io/atomix/raft/RaftCommittedEntryListener.java\n+++ /dev/null\n@@ -1,32 +0,0 @@\n-/*\n- * Copyright 2016-present Open Networking Foundation\n- * Copyright \u00a9 2020 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations 
under the License.\n- */\n-package io.atomix.raft;\n-\n-import io.atomix.raft.storage.log.IndexedRaftLogEntry;\n-\n-/**\n- * This listener will only be called by the Leader, when it commits an entry. If RAFT is currently\n- * running in a follower role, it will not call this listener.\n- */\n-@FunctionalInterface\n-public interface RaftCommittedEntryListener {\n-\n- /**\n- * @param indexedRaftLogEntry the new committed entry\n- */\n- void onCommit(IndexedRaftLogEntry indexedRaftLogEntry);\n-}\ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java b/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java\nindex 1f4ee98..c177cb1 100644\n--- a/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java\n@@ -27,8 +27,8 @@ import io.atomix.cluster.MemberId;\n import io.atomix.cluster.messaging.MessagingException.NoRemoteHandler;\n import io.atomix.cluster.messaging.MessagingException.NoSuchMemberException;\n import io.atomix.raft.ElectionTimer;\n+import io.atomix.raft.RaftApplicationEntryCommittedPositionListener;\n import io.atomix.raft.RaftCommitListener;\n-import io.atomix.raft.RaftCommittedEntryListener;\n import io.atomix.raft.RaftError;\n import io.atomix.raft.RaftException.ProtocolException;\n import io.atomix.raft.RaftRoleChangeListener;\n@@ -61,7 +61,6 @@ import io.atomix.raft.roles.PromotableRole;\n import io.atomix.raft.roles.RaftRole;\n import io.atomix.raft.storage.RaftStorage;\n import io.atomix.raft.storage.StorageException;\n-import io.atomix.raft.storage.log.IndexedRaftLogEntry;\n import io.atomix.raft.storage.log.RaftLog;\n import io.atomix.raft.storage.system.MetaStore;\n import io.atomix.raft.utils.StateUtil;\n@@ -115,7 +114,7 @@ public class RaftContext implements AutoCloseable, HealthMonitorable {\n private final Set<Consumer<State>> stateChangeListeners = new CopyOnWriteArraySet<>();\n private final Set<Consumer<RaftMember>> electionListeners = new CopyOnWriteArraySet<>();\n private final Set<RaftCommitListener> commitListeners = new CopyOnWriteArraySet<>();\n- private final Set<RaftCommittedEntryListener> committedEntryListeners =\n+ private final Set<RaftApplicationEntryCommittedPositionListener> committedEntryListeners =\n new CopyOnWriteArraySet<>();\n private final Set<SnapshotReplicationListener> snapshotReplicationListeners =\n new CopyOnWriteArraySet<>();\n@@ -433,21 +432,23 @@ public class RaftContext implements AutoCloseable, HealthMonitorable {\n * <p>Note that it will be called on the Raft thread, and as such should not perform any heavy\n * computation.\n *\n- * @param raftCommittedEntryListener the listener to add\n+ * @param raftApplicationEntryCommittedPositionListener the listener to add\n */\n public void addCommittedEntryListener(\n- final RaftCommittedEntryListener raftCommittedEntryListener) {\n- committedEntryListeners.add(raftCommittedEntryListener);\n+ final RaftApplicationEntryCommittedPositionListener\n+ raftApplicationEntryCommittedPositionListener) {\n+ committedEntryListeners.add(raftApplicationEntryCommittedPositionListener);\n }\n \n /**\n * Removes registered committedEntryListener\n *\n- * @param raftCommittedEntryListener the listener to remove\n+ * @param raftApplicationEntryCommittedPositionListener the listener to remove\n */\n public void removeCommittedEntryListener(\n- final RaftCommittedEntryListener raftCommittedEntryListener) {\n- committedEntryListeners.remove(raftCommittedEntryListener);\n+ final 
RaftApplicationEntryCommittedPositionListener\n+ raftApplicationEntryCommittedPositionListener) {\n+ committedEntryListeners.remove(raftApplicationEntryCommittedPositionListener);\n }\n \n /**\n@@ -464,7 +465,7 @@ public class RaftContext implements AutoCloseable, HealthMonitorable {\n *\n * @param committedEntry the most recently committed entry\n */\n- public void notifyCommittedEntryListeners(final IndexedRaftLogEntry committedEntry) {\n+ public void notifyApplicationEntryCommittedPositionListeners(final long committedEntry) {\n committedEntryListeners.forEach(listener -> listener.onCommit(committedEntry));\n }\n \ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java b/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java\nindex 56c7172..d075fca 100644\n--- a/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java\n@@ -21,8 +21,8 @@ import io.atomix.cluster.MemberId;\n import io.atomix.cluster.messaging.ClusterCommunicationService;\n import io.atomix.primitive.partition.Partition;\n import io.atomix.primitive.partition.PartitionMetadata;\n+import io.atomix.raft.RaftApplicationEntryCommittedPositionListener;\n import io.atomix.raft.RaftCommitListener;\n-import io.atomix.raft.RaftCommittedEntryListener;\n import io.atomix.raft.RaftRoleChangeListener;\n import io.atomix.raft.RaftServer;\n import io.atomix.raft.RaftServer.Role;\n@@ -205,16 +205,20 @@ public class RaftPartitionServer implements HealthMonitorable {\n }\n \n /**\n- * @see io.atomix.raft.impl.RaftContext#addCommittedEntryListener(RaftCommittedEntryListener)\n+ * @see\n+ * io.atomix.raft.impl.RaftContext#addCommittedEntryListener(RaftApplicationEntryCommittedPositionListener)\n */\n- public void addCommittedEntryListener(final RaftCommittedEntryListener commitListener) {\n+ public void addCommittedEntryListener(\n+ final RaftApplicationEntryCommittedPositionListener commitListener) {\n server.getContext().addCommittedEntryListener(commitListener);\n }\n \n /**\n- * @see io.atomix.raft.impl.RaftContext#removeCommittedEntryListener(RaftCommittedEntryListener)\n+ * @see\n+ * io.atomix.raft.impl.RaftContext#removeCommittedEntryListener(RaftApplicationEntryCommittedPositionListener)\n */\n- public void removeCommittedEntryListener(final RaftCommittedEntryListener commitListener) {\n+ public void removeCommittedEntryListener(\n+ final RaftApplicationEntryCommittedPositionListener commitListener) {\n server.getContext().removeCommittedEntryListener(commitListener);\n }\n \ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java b/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java\nindex e54df1a..fcfd177 100644\n--- a/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java\n@@ -630,27 +630,47 @@ public final class LeaderRole extends ActiveRole implements ZeebeLogAppender {\n \n private void replicate(final IndexedRaftLogEntry indexed, final AppendListener appendListener) {\n raft.checkThread();\n- appender\n- .appendEntries(indexed.index())\n- .whenCompleteAsync(\n- (commitIndex, commitError) -> {\n- if (!isRunning()) {\n- return;\n- }\n+ final var appendEntriesFuture = appender.appendEntries(indexed.index());\n+\n+ final boolean applicationEntryWasCommitted = indexed.isApplicationEntry();\n+ if (applicationEntryWasCommitted) 
{\n+ // We have some services which are waiting for the application records, especially position\n+ // to be committed. This is our glue code to notify them, instead of\n+ // passing the complete object (IndexedRaftLogEntry) threw the listeners and\n+ // keep them in heap until they are committed. This had the risk of going out of OOM\n+ // if records can't be committed, see https://github.com/camunda/zeebe/issues/14275\n+ final var committedPosition = indexed.getApplicationEntry().highestPosition();\n+ appendEntriesFuture.whenCompleteAsync(\n+ (commitIndex, commitError) -> {\n+ if (!isRunning()) {\n+ return;\n+ }\n+\n+ if (commitError == null) {\n+ raft.notifyApplicationEntryCommittedPositionListeners(committedPosition);\n+ }\n+ },\n+ raft.getThreadContext());\n+ }\n \n- // have the state machine apply the index which should do nothing but ensures it keeps\n- // up to date with the latest entries, so it can handle configuration and initial\n- // entries properly on fail over\n- if (commitError == null) {\n- appendListener.onCommit(indexed.index());\n- raft.notifyCommittedEntryListeners(indexed);\n- } else {\n- appendListener.onCommitError(indexed.index(), commitError);\n- // replicating the entry will be retried on the next append request\n- log.error(\"Failed to replicate entry: {}\", indexed, commitError);\n- }\n- },\n- raft.getThreadContext());\n+ appendEntriesFuture.whenCompleteAsync(\n+ (commitIndex, commitError) -> {\n+ if (!isRunning()) {\n+ return;\n+ }\n+\n+ // have the state machine apply the index which should do nothing but ensures it keeps\n+ // up to date with the latest entries, so it can handle configuration and initial\n+ // entries properly on fail over\n+ if (commitError == null) {\n+ appendListener.onCommit(indexed.index());\n+ } else {\n+ appendListener.onCommitError(indexed.index(), commitError);\n+ // replicating the entry will be retried on the next append request\n+ log.error(\"Failed to replicate entry: {}\", indexed, commitError);\n+ }\n+ },\n+ raft.getThreadContext());\n }\n \n public synchronized void onInitialEntriesCommitted(final Runnable runnable) {\ndiff --git a/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java b/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java\nindex b217586..8029766 100644\n--- a/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java\n+++ b/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java\n@@ -82,7 +82,7 @@ public class RaftAppendTest {\n @Test\n public void shouldNotifyCommittedEntryListenerOnLeaderOnly() throws Throwable {\n // given\n- final var committedEntryListener = mock(RaftCommittedEntryListener.class);\n+ final var committedEntryListener = mock(RaftApplicationEntryCommittedPositionListener.class);\n raftRule.addCommittedEntryListener(committedEntryListener);\n \n // when\ndiff --git a/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java b/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java\nindex 8f73cba..193a176 100644\n--- a/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java\n+++ b/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java\n@@ -644,9 +644,12 @@ public final class RaftRule extends ExternalResource {\n }\n \n public void addCommittedEntryListener(\n- final RaftCommittedEntryListener raftCommittedEntryListener) {\n+ final RaftApplicationEntryCommittedPositionListener\n+ raftApplicationEntryCommittedPositionListener) {\n servers.forEach(\n- (id, raft) -> raft.getContext().addCommittedEntryListener(raftCommittedEntryListener));\n+ (id, raft) 
->\n+ raft.getContext()\n+ .addCommittedEntryListener(raftApplicationEntryCommittedPositionListener));\n }\n \n public void partition(final RaftServer follower) {\ndiff --git a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\nindex a61571f..6c082d7 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n@@ -7,8 +7,7 @@\n */\n package io.camunda.zeebe.broker.system.partitions.impl;\n \n-import io.atomix.raft.RaftCommittedEntryListener;\n-import io.atomix.raft.storage.log.IndexedRaftLogEntry;\n+import io.atomix.raft.RaftApplicationEntryCommittedPositionListener;\n import io.camunda.zeebe.broker.system.partitions.NoEntryAtSnapshotPosition;\n import io.camunda.zeebe.broker.system.partitions.StateController;\n import io.camunda.zeebe.logstreams.impl.Loggers;\n@@ -36,7 +35,7 @@ import java.util.function.Consumer;\n import org.slf4j.Logger;\n \n public final class AsyncSnapshotDirector extends Actor\n- implements RaftCommittedEntryListener, HealthMonitorable {\n+ implements RaftApplicationEntryCommittedPositionListener, HealthMonitorable {\n \n public static final Duration MINIMUM_SNAPSHOT_PERIOD = Duration.ofMinutes(1);\n \n@@ -115,7 +114,7 @@ public final class AsyncSnapshotDirector extends Actor\n @Override\n protected void handleFailure(final Throwable failure) {\n LOG.error(\n- \"No snapshot was taken due to failure in '{}'. Will try to take snapshot after snapshot period {}. {}\",\n+ \"No snapshot was taken due to failure in '{}'. Will try to take snapshot after snapshot period {}.\",\n actorName,\n snapshotRate,\n failure);\n@@ -407,13 +406,8 @@ public final class AsyncSnapshotDirector extends Actor\n }\n \n @Override\n- public void onCommit(final IndexedRaftLogEntry indexedRaftLogEntry) {\n- // is called by the Leader Role and gives the last committed entry, where we\n- // can extract the highest position, which corresponds to the last committed position\n- if (indexedRaftLogEntry.isApplicationEntry()) {\n- final var committedPosition = indexedRaftLogEntry.getApplicationEntry().highestPosition();\n- newPositionCommitted(committedPosition);\n- }\n+ public void onCommit(final long committedPosition) {\n+ newPositionCommitted(committedPosition);\n }\n \n public void newPositionCommitted(final long currentCommitPosition) {\n", "diff --git a/.ci/scripts/distribution/analyse-java.sh b/.ci/scripts/distribution/analyse-java.sh\nindex a0122f7..0e965df 100755\n--- a/.ci/scripts/distribution/analyse-java.sh\n+++ b/.ci/scripts/distribution/analyse-java.sh\n@@ -23,12 +23,12 @@ else\n fi\n \n if [ \"${GIT_BRANCH}\" == \"master\" ] || [ \"${GIT_BRANCH}\" == \"develop\" ]; then\n- TARGET_BRANCH=\"master\"\n+ TARGET_BRANCH=\"${GIT_BRANCH}\"\n else\n TARGET_BRANCH=\"develop\"\n+ PROPERTIES+=(\"-Dsonar.branch.target=${TARGET_BRANCH}\")\n fi\n \n- PROPERTIES+=(\"-Dsonar.branch.target=${TARGET_BRANCH}\")\n git fetch --no-tags \"${GIT_URL}\" \"+refs/heads/${TARGET_BRANCH}:refs/remotes/origin/${TARGET_BRANCH}\"\n fi\n \ndiff --git a/parent/pom.xml b/parent/pom.xml\nindex f4c3160..d34b41f 100644\n--- a/parent/pom.xml\n+++ b/parent/pom.xml\n@@ -1570,7 +1570,7 @@\n <!-- sonarscanner integration -->\n <!-- sonar.login token must be passed at runtime to avoid sharing token -->\n 
<sonar.host.url>https://sonarcloud.io</sonar.host.url>\n- <sonar.organization>zeebe-io</sonar.organization>\n+ <sonar.organization>camunda-cloud</sonar.organization>\n <sonar.login>${env.SONARCLOUD_TOKEN}</sonar.login>\n <sonar.links.issue>${project.scm.url}/issues</sonar.links.issue>\n <sonar.cpd.exclusions>\n", "diff --git a/src/background/audio-manager.ts b/src/background/audio-manager.ts\nindex 84032f1..9e116fc 100644\n--- a/src/background/audio-manager.ts\n+++ b/src/background/audio-manager.ts\n@@ -1,4 +1,4 @@\n-import { timeout } from '@/_helpers/promise-more'\n+import { timer } from '@/_helpers/promise-more'\n \n /**\n * To make sure only one audio plays at a time\n@@ -16,6 +16,8 @@ export class AudioManager {\n \n private audio?: HTMLAudioElement\n \n+ currentSrc?: string\n+\n reset() {\n if (this.audio) {\n this.audio.pause()\n@@ -23,28 +25,33 @@ export class AudioManager {\n this.audio.src = ''\n this.audio.onended = null\n }\n+ this.currentSrc = ''\n }\n \n load(src: string): HTMLAudioElement {\n this.reset()\n+ this.currentSrc = src\n return (this.audio = new Audio(src))\n }\n \n async play(src?: string): Promise<void> {\n- if (!src) {\n+ if (!src || src === this.currentSrc) {\n this.reset()\n return\n }\n \n const audio = this.load(src)\n \n- const onEnd = new Promise(resolve => {\n- audio.onended = resolve\n- })\n+ const onEnd = Promise.race([\n+ new Promise(resolve => {\n+ audio.onended = resolve\n+ }),\n+ timer(20000)\n+ ])\n+\n+ await audio.play()\n+ await onEnd\n \n- await audio\n- .play()\n- .then(() => timeout(onEnd, 4000))\n- .catch(() => {})\n+ this.currentSrc = ''\n }\n }\ndiff --git a/src/background/server.ts b/src/background/server.ts\nindex 65f6f6c..4c70196 100644\n--- a/src/background/server.ts\n+++ b/src/background/server.ts\n@@ -64,6 +64,9 @@ export class BackgroundServer {\n return openURL(msg.payload.url, msg.payload.self)\n case 'PLAY_AUDIO':\n return AudioManager.getInstance().play(msg.payload)\n+ case 'STOP_AUDIO':\n+ AudioManager.getInstance().reset()\n+ return\n case 'FETCH_DICT_RESULT':\n return this.fetchDictResult(msg.payload)\n case 'DICT_ENGINE_METHOD':\n@@ -79,6 +82,7 @@ export class BackgroundServer {\n case 'OPEN_QS_PANEL':\n return this.openQSPanel()\n case 'CLOSE_QS_PANEL':\n+ AudioManager.getInstance().reset()\n return this.qsPanelManager.destroy()\n case 'QS_SWITCH_SIDEBAR':\n return this.qsPanelManager.toggleSidebar(msg.payload)\n@@ -105,6 +109,16 @@ export class BackgroundServer {\n return this.youdaoTranslateAjax(msg.payload)\n }\n })\n+\n+ browser.runtime.onConnect.addListener(port => {\n+ if (port.name === 'popup') {\n+ // This is a workaround for browser action page\n+ // which does not fire beforeunload event\n+ port.onDisconnect.addListener(() => {\n+ AudioManager.getInstance().reset()\n+ })\n+ }\n+ })\n }\n \n async openQSPanel(): Promise<void> {\ndiff --git a/src/content/redux/epics/index.ts b/src/content/redux/epics/index.ts\nindex b941c07..587b54d 100644\n--- a/src/content/redux/epics/index.ts\n+++ b/src/content/redux/epics/index.ts\n@@ -1,6 +1,6 @@\n import { combineEpics } from 'redux-observable'\n import { from, of, EMPTY } from 'rxjs'\n-import { map, mapTo, mergeMap, filter } from 'rxjs/operators'\n+import { map, mapTo, mergeMap, filter, pairwise } from 'rxjs/operators'\n \n import { isPopupPage, isStandalonePage } from '@/_helpers/saladict'\n import { saveWord } from '@/_helpers/record-manager'\n@@ -11,6 +11,7 @@ import { ofType } from './utils'\n import searchStartEpic from './searchStart.epic'\n import newSelectionEpic from 
'./newSelection.epic'\n import { translateCtxs, genCtxText } from '@/_helpers/translateCtx'\n+import { message } from '@/_helpers/browser-api'\n \n export const epics = combineEpics<StoreAction, StoreAction, StoreState>(\n /** Start searching text. This will also send to Redux. */\n@@ -28,6 +29,17 @@ export const epics = combineEpics<StoreAction, StoreAction, StoreState>(\n )\n ),\n (action$, state$) =>\n+ state$.pipe(\n+ map(state => state.isShowDictPanel),\n+ pairwise(),\n+ mergeMap(([oldShow, newShow]) => {\n+ if (oldShow && !newShow) {\n+ message.send({ type: 'STOP_AUDIO' })\n+ }\n+ return EMPTY\n+ })\n+ ),\n+ (action$, state$) =>\n action$.pipe(\n ofType('ADD_TO_NOTEBOOK'),\n mergeMap(() => {\ndiff --git a/src/popup/index.tsx b/src/popup/index.tsx\nindex cbca1c0..a406bfd 100644\n--- a/src/popup/index.tsx\n+++ b/src/popup/index.tsx\n@@ -21,6 +21,10 @@ import Popup from './Popup'\n import Notebook from './Notebook'\n import './_style.scss'\n \n+// This is a workaround for browser action page\n+// which does not fire beforeunload event\n+browser.runtime.connect({ name: 'popup' } as any) // wrong typing\n+\n const Title: FC = () => {\n const { t } = useTranslate('popup')\n return (\ndiff --git a/src/typings/message.ts b/src/typings/message.ts\nindex bdd6fad..63238cb 100644\n--- a/src/typings/message.ts\n+++ b/src/typings/message.ts\n@@ -146,6 +146,8 @@ export type MessageConfig = MessageConfigType<{\n payload: string\n }\n \n+ STOP_AUDIO: {}\n+\n LAST_PLAY_AUDIO: {\n response?: null | { src: string; timestamp: number }\n }\n", "diff --git a/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf b/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\nindex d895677..cf10e3f 100644\n--- a/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\n+++ b/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\n@@ -12,6 +12,7 @@ provider \"aws\" {\n resource \"aws_vpn_connection\" \"vpn_connection\" {\n customer_gateway_id = \"dummy-customer-gateway-id\"\n type = \"ipsec.1\"\n+ vpn_gateway_id = \"vpn-gateway-id\"\n }\n \n resource \"aws_vpn_connection\" \"transit\" {\n@@ -23,10 +24,11 @@ resource \"aws_vpn_connection\" \"transit\" {\n resource \"aws_vpn_connection\" \"vpn_connection_withUsage\" {\n customer_gateway_id = \"dummy-customer-gateway-id2\"\n type = \"ipsec.1\"\n+ vpn_gateway_id = \"vpn-gateway-id\"\n }\n \n resource \"aws_vpn_connection\" \"transit_withUsage\" {\n customer_gateway_id = \"dummy-customer-gateway-id2\"\n type = \"ipsec.1\"\n transit_gateway_id = \"dummy-transit-gateway-id2\"\n-}\n\\ No newline at end of file\n+}\n"]
5
["a9475f359061fcd6cd53557599fedf0df5e9ee00", "323cf81961cdd3748a7ba6ba470ecb13e5374e9f", "6cbbd98dfe6c768dbe49f8d6d2448856a9a86089", "97cabf49e7aca7754edde247003fbcb4ea42dd59", "9b059dd8245e72f0bf8c40fc633f9ef6fccae405"]
["feat", "refactor", "build", "fix", "test"]
Use arm64v8 postfix for Cube Store :dev build,rename ELECTRON_CACHE env variable to electron_config_cache (#21313),fix unstable MessageCorrelationTest,simplify loadFiles code,add react ecosystem
["diff --git a/.github/workflows/rust-cubestore-master.yml b/.github/workflows/rust-cubestore-master.yml\nindex 4a84984..bb07cd7 100644\n--- a/.github/workflows/rust-cubestore-master.yml\n+++ b/.github/workflows/rust-cubestore-master.yml\n@@ -115,9 +115,9 @@ jobs:\n if [[ $VERSION =~ ^v[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$ ]]; then\n MINOR=${VERSION%.*}\n MAJOR=${MINOR%.*}\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR},${DOCKER_IMAGE}:latest\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR}\"\n elif [ \"${{ github.event_name }}\" = \"push\" ]; then\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}${{ matrix.postfix }}\"\n fi\n \n echo ::set-output name=version::${VERSION}\n", "diff --git a/docs/tutorial/installation.md b/docs/tutorial/installation.md\nindex d4af120..1a09eea 100644\n--- a/docs/tutorial/installation.md\n+++ b/docs/tutorial/installation.md\n@@ -82,7 +82,7 @@ with the network at all.\n On environments that have been using older versions of Electron, you might find the\n cache also in `~/.electron`.\n \n-You can also override the local cache location by providing a `ELECTRON_CACHE`\n+You can also override the local cache location by providing a `electron_config_cache`\n environment variable.\n \n The cache contains the version's official zip file as well as a checksum, stored as\n", "diff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\nindex 0f5fed9..796393c 100644\n--- a/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\n@@ -27,7 +27,6 @@ import static io.zeebe.test.util.MsgPackUtil.asMsgPack;\n import static org.assertj.core.api.Assertions.assertThat;\n import static org.assertj.core.api.Assertions.entry;\n \n-import io.zeebe.UnstableTest;\n import io.zeebe.broker.test.EmbeddedBrokerRule;\n import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.BpmnModelInstance;\n@@ -50,7 +49,6 @@ import org.agrona.DirectBuffer;\n import org.junit.Before;\n import org.junit.Rule;\n import org.junit.Test;\n-import org.junit.experimental.categories.Category;\n import org.junit.rules.RuleChain;\n import org.junit.runner.RunWith;\n import org.junit.runners.Parameterized;\n@@ -165,7 +163,7 @@ public class MessageCorrelationTest {\n \"receive-message\", WorkflowInstanceIntent.ELEMENT_ACTIVATED);\n \n final SubscribedRecord messageSubscription =\n- findMessageSubscription(testClient, MessageSubscriptionIntent.OPENED);\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\n assertThat(messageSubscription.valueType()).isEqualTo(ValueType.MESSAGE_SUBSCRIPTION);\n assertThat(messageSubscription.recordType()).isEqualTo(RecordType.EVENT);\n assertThat(messageSubscription.value())\n@@ -244,7 +242,7 @@ public class MessageCorrelationTest {\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(\"wf\", asMsgPack(\"orderId\", \"order-123\"));\n \n- testClient.receiveFirstWorkflowInstanceEvent(WorkflowInstanceIntent.ELEMENT_ACTIVATED);\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\n \n // when\n testClient.publishMessage(\"order canceled\", \"order-123\", asMsgPack(\"foo\", \"bar\"));\n@@ -308,13 +306,12 @@ public class MessageCorrelationTest {\n }\n \n @Test\n- @Category(UnstableTest.class) // => 
https://github.com/zeebe-io/zeebe/issues/1234\n public void shouldCorrelateMessageWithZeroTTL() throws Exception {\n // given\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(\"wf\", asMsgPack(\"orderId\", \"order-123\"));\n \n- testClient.receiveElementInState(\"receive-message\", WorkflowInstanceIntent.ELEMENT_ACTIVATED);\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\n \n // when\n testClient.publishMessage(\"order canceled\", \"order-123\", asMsgPack(\"foo\", \"bar\"), 0);\n@@ -499,10 +496,9 @@ public class MessageCorrelationTest {\n .containsEntry(\"activityInstanceKey\", catchEventEntered.key());\n }\n \n- private SubscribedRecord findMessageSubscription(\n- final TestPartitionClient client, final MessageSubscriptionIntent intent)\n+ private SubscribedRecord findMessageSubscription(final MessageSubscriptionIntent intent)\n throws AssertionError {\n- return client\n+ return testClient\n .receiveEvents()\n .filter(intent(intent))\n .findFirst()\n", "diff --git a/frontend/app/player/web/network/loadFiles.ts b/frontend/app/player/web/network/loadFiles.ts\nindex ec174fc..d164333 100644\n--- a/frontend/app/player/web/network/loadFiles.ts\n+++ b/frontend/app/player/web/network/loadFiles.ts\n@@ -1,43 +1,33 @@\n import APIClient from 'App/api_client';\n \n-const NO_NTH_FILE = \"nnf\"\n-const NO_UNPROCESSED_FILES = \"nuf\"\n+const NO_FILE_OK = \"No-file-but-this-is-ok\"\n+const NO_BACKUP_FILE = \"No-efs-file\"\n \n export const loadFiles = (\n urls: string[],\n onData: (data: Uint8Array) => void,\n ): Promise<void> => {\n- const firstFileURL = urls[0]\n- urls = urls.slice(1)\n- if (!firstFileURL) {\n+ if (!urls.length) {\n return Promise.reject(\"No urls provided\")\n }\n- return window.fetch(firstFileURL)\n- .then(r => {\n- return processAPIStreamResponse(r, true)\n- })\n- .then(onData)\n- .then(() =>\n- urls.reduce((p, url) =>\n- p.then(() =>\n- window.fetch(url)\n- .then(r => {\n- return processAPIStreamResponse(r, false)\n- })\n- .then(onData)\n- ),\n- Promise.resolve(),\n- )\n+ return urls.reduce((p, url, index) =>\n+ p.then(() =>\n+ window.fetch(url)\n+ .then(r => {\n+ return processAPIStreamResponse(r, index===0)\n+ })\n+ .then(onData)\n+ ),\n+ Promise.resolve(),\n )\n .catch(e => {\n- if (e === NO_NTH_FILE) {\n+ if (e === NO_FILE_OK) {\n return\n }\n throw e\n })\n }\n \n-\n export async function requestEFSDom(sessionId: string) {\n return await requestEFSMobFile(sessionId + \"/dom.mob\")\n }\n@@ -50,21 +40,18 @@ async function requestEFSMobFile(filename: string) {\n const api = new APIClient()\n const res = await api.fetch('/unprocessed/' + filename)\n if (res.status >= 400) {\n- throw NO_UNPROCESSED_FILES\n+ throw NO_BACKUP_FILE\n }\n return await processAPIStreamResponse(res, false)\n }\n \n-const processAPIStreamResponse = (response: Response, isFirstFile: boolean) => {\n+const processAPIStreamResponse = (response: Response, canBeMissed: boolean) => {\n return new Promise<ArrayBuffer>((res, rej) => {\n- if (response.status === 404 && !isFirstFile) {\n- return rej(NO_NTH_FILE)\n+ if (response.status === 404 && canBeMissed) {\n+ return rej(NO_FILE_OK)\n }\n if (response.status >= 400) {\n- return rej(\n- isFirstFile ? `no start file. status code ${ response.status }`\n- : `Bad endfile status code ${response.status}`\n- )\n+ return rej(`Bad file status code ${response.status}. 
Url: ${response.url}`)\n }\n res(response.arrayBuffer())\n }).then(buffer => new Uint8Array(buffer))\n", "diff --git a/package.json b/package.json\nindex 1ba8c4f..d1de9a0 100644\n--- a/package.json\n+++ b/package.json\n@@ -36,14 +36,19 @@\n \"@types/node\": \"^9.3.0\",\n \"@types/react\": \"^16.0.34\",\n \"@types/react-dom\": \"^16.0.3\",\n+ \"@types/react-motion\": \"^0.0.25\",\n \"bootstrap-sass\": \"^3.3.7\",\n \"highcharts\": \"^6.0.4\",\n \"html2canvas\": \"^1.0.0-alpha.9\",\n+ \"immer\": \"^1.2.1\",\n \"lodash\": \"^4.17.4\",\n \"moment\": \"^2.20.1\",\n \"normalize.css\": \"^8.0.0\",\n- \"react\": \"^16.2.0\",\n- \"react-dom\": \"^16.2.0\",\n+ \"react\": \"^16.3.1\",\n+ \"react-dom\": \"^16.3.1\",\n+ \"react-motion\": \"^0.5.2\",\n+ \"react-redux\": \"^5.0.7\",\n+ \"redux\": \"^3.7.2\",\n \"rxjs\": \"^5.5.6\",\n \"vue\": \"^2.5.13\",\n \"vue-plugin-webextension-i18n\": \"^0.1.0\",\ndiff --git a/yarn.lock b/yarn.lock\nindex c8898d8..5d0fc9f 100644\n--- a/yarn.lock\n+++ b/yarn.lock\n@@ -187,6 +187,12 @@\n \"@types/node\" \"*\"\n \"@types/react\" \"*\"\n \n+\"@types/react-motion@^0.0.25\":\n+ version \"0.0.25\"\n+ resolved \"https://registry.npmjs.org/@types/react-motion/-/react-motion-0.0.25.tgz#2445745ee8e8e6149faa47a36ff6b0d4c21dbf94\"\n+ dependencies:\n+ \"@types/react\" \"*\"\n+\n \"@types/react@*\", \"@types/react@^16.0.34\":\n version \"16.0.40\"\n resolved \"https://registry.npmjs.org/@types/react/-/react-16.0.40.tgz#caabc2296886f40b67f6fc80f0f3464476461df9\"\n@@ -3837,6 +3843,10 @@ [email protected]:\n version \"4.2.1\"\n resolved \"https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz#9634502aa12c445dd5a7c5734b572bb8738aacbb\"\n \n+hoist-non-react-statics@^2.5.0:\n+ version \"2.5.0\"\n+ resolved \"https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.0.tgz#d2ca2dfc19c5a91c5a6615ce8e564ef0347e2a40\"\n+\n home-or-tmp@^2.0.0:\n version \"2.0.0\"\n resolved \"https://registry.npmjs.org/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8\"\n@@ -4004,6 +4014,10 @@ ignore@^3.3.5:\n version \"3.3.7\"\n resolved \"https://registry.npmjs.org/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021\"\n \n+immer@^1.2.1:\n+ version \"1.2.1\"\n+ resolved \"https://registry.npmjs.org/immer/-/immer-1.2.1.tgz#96e2ae29cdfc428f28120b832701931b92fa597c\"\n+\n import-local@^1.0.0:\n version \"1.0.0\"\n resolved \"https://registry.npmjs.org/import-local/-/import-local-1.0.0.tgz#5e4ffdc03f4fe6c009c6729beb29631c2f8227bc\"\n@@ -4104,7 +4118,7 @@ interpret@^1.0.0:\n version \"1.1.0\"\n resolved \"https://registry.npmjs.org/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614\"\n \n-invariant@^2.2.2:\n+invariant@^2.0.0, invariant@^2.2.2:\n version \"2.2.4\"\n resolved \"https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6\"\n dependencies:\n@@ -5040,6 +5054,10 @@ locate-path@^2.0.0:\n p-locate \"^2.0.0\"\n path-exists \"^3.0.0\"\n \n+lodash-es@^4.17.5, lodash-es@^4.2.1:\n+ version \"4.17.8\"\n+ resolved \"https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.8.tgz#6fa8c8c5d337481df0bdf1c0d899d42473121e45\"\n+\n lodash._reinterpolate@~3.0.0:\n version \"3.0.0\"\n resolved \"https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d\"\n@@ -5149,7 +5167,7 @@ [email protected]:\n version \"4.17.2\"\n resolved \"https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz#34a3055babe04ce42467b607d700072c7ff6bf42\"\n \[email 
protected], lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.16.3, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@~4.17.4:\[email protected], lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.16.3, lodash@^4.17.2, lodash@^4.17.3, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@~4.17.4:\n version \"4.17.5\"\n resolved \"https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz#99a92d65c0272debe8c96b6057bc8fbfa3bed511\"\n \n@@ -6467,7 +6485,7 @@ promise@^7.1.1:\n dependencies:\n asap \"~2.0.3\"\n \n-prop-types@^15.6.0:\n+prop-types@^15.5.8, prop-types@^15.6.0:\n version \"15.6.1\"\n resolved \"https://registry.npmjs.org/prop-types/-/prop-types-15.6.1.tgz#36644453564255ddda391191fb3a125cbdf654ca\"\n dependencies:\n@@ -6574,7 +6592,7 @@ quick-lru@^1.0.0:\n version \"1.1.0\"\n resolved \"https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz#4360b17c61136ad38078397ff11416e186dcfbb8\"\n \[email protected]:\[email protected], raf@^3.1.0:\n version \"3.4.0\"\n resolved \"https://registry.npmjs.org/raf/-/raf-3.4.0.tgz#a28876881b4bc2ca9117d4138163ddb80f781575\"\n dependencies:\n@@ -6645,9 +6663,9 @@ react-dev-utils@^5.0.0:\n strip-ansi \"3.0.1\"\n text-table \"0.2.0\"\n \n-react-dom@^16.2.0:\n- version \"16.2.0\"\n- resolved \"https://registry.npmjs.org/react-dom/-/react-dom-16.2.0.tgz#69003178601c0ca19b709b33a83369fe6124c044\"\n+react-dom@^16.3.1:\n+ version \"16.3.1\"\n+ resolved \"https://registry.npmjs.org/react-dom/-/react-dom-16.3.1.tgz#6a3c90a4fb62f915bdbcf6204422d93a7d4ca573\"\n dependencies:\n fbjs \"^0.8.16\"\n loose-envify \"^1.1.0\"\n@@ -6658,9 +6676,28 @@ react-error-overlay@^4.0.0:\n version \"4.0.0\"\n resolved \"https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-4.0.0.tgz#d198408a85b4070937a98667f500c832f86bd5d4\"\n \n-react@^16.2.0:\n- version \"16.2.0\"\n- resolved \"https://registry.npmjs.org/react/-/react-16.2.0.tgz#a31bd2dab89bff65d42134fa187f24d054c273ba\"\n+react-motion@^0.5.2:\n+ version \"0.5.2\"\n+ resolved \"https://registry.npmjs.org/react-motion/-/react-motion-0.5.2.tgz#0dd3a69e411316567927917c6626551ba0607316\"\n+ dependencies:\n+ performance-now \"^0.2.0\"\n+ prop-types \"^15.5.8\"\n+ raf \"^3.1.0\"\n+\n+react-redux@^5.0.7:\n+ version \"5.0.7\"\n+ resolved \"https://registry.npmjs.org/react-redux/-/react-redux-5.0.7.tgz#0dc1076d9afb4670f993ffaef44b8f8c1155a4c8\"\n+ dependencies:\n+ hoist-non-react-statics \"^2.5.0\"\n+ invariant \"^2.0.0\"\n+ lodash \"^4.17.5\"\n+ lodash-es \"^4.17.5\"\n+ loose-envify \"^1.1.0\"\n+ prop-types \"^15.6.0\"\n+\n+react@^16.3.1:\n+ version \"16.3.1\"\n+ resolved \"https://registry.npmjs.org/react/-/react-16.3.1.tgz#4a2da433d471251c69b6033ada30e2ed1202cfd8\"\n dependencies:\n fbjs \"^0.8.16\"\n loose-envify \"^1.1.0\"\n@@ -6788,6 +6825,15 @@ reduce-function-call@^1.0.1:\n dependencies:\n balanced-match \"^0.4.2\"\n \n+redux@^3.7.2:\n+ version \"3.7.2\"\n+ resolved \"https://registry.npmjs.org/redux/-/redux-3.7.2.tgz#06b73123215901d25d065be342eb026bc1c8537b\"\n+ dependencies:\n+ lodash \"^4.2.1\"\n+ lodash-es \"^4.2.1\"\n+ loose-envify \"^1.1.0\"\n+ symbol-observable \"^1.0.3\"\n+\n regenerate@^1.2.1:\n version \"1.3.3\"\n resolved \"https://registry.npmjs.org/regenerate/-/regenerate-1.3.3.tgz#0c336d3980553d755c39b586ae3b20aa49c82b7f\"\n@@ -7811,6 +7857,10 @@ [email protected]:\n version \"1.0.1\"\n resolved 
\"https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4\"\n \n+symbol-observable@^1.0.3:\n+ version \"1.2.0\"\n+ resolved \"https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804\"\n+\n symbol-tree@^3.2.2:\n version \"3.2.2\"\n resolved \"https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6\"\n"]
5
["10bdcb452ff9d2b884d45a9c43a4b8a20fc4a883", "f2f52c23b513dd857350f3c163f676d37189d0d3", "98bed2a8137930149559bc1cae9bd34a1a75e556", "983fef55ef08ca2ca25349bb2d5bdff10ecf89f4", "7e04a5e829d7416e312ac342a00a11787745753b"]
["ci", "docs", "test", "refactor", "build"]
parallelize pybind11 build,get ip from forwarded header,use new freespace config for disk space recovery test,new ShowDebug parameter: calculate each segment timing; new parameter to show/hide segment debug information; set-poshprompt updated with the new showDebug parameter; force disabled segment to be visible for debug purpose,group example
["diff --git a/poetry-overrides.nix b/poetry-overrides.nix\nindex d37c5ed..aaaaf02 100644\n--- a/poetry-overrides.nix\n+++ b/poetry-overrides.nix\n@@ -82,4 +82,11 @@ self: super:\n {\n patches = (attrs.patches or [ ]) ++ [ ./patches/watchdog-force-kqueue.patch ];\n });\n+\n+ pybind11 = super.pybind11.overridePythonAttrs (_: {\n+ postBuild = ''\n+ # build tests\n+ make -j $NIX_BUILD_CORES -l $NIX_BUILD_CORES\n+ '';\n+ });\n }\n", "diff --git a/kousa/lib/broth/socket_handler.ex b/kousa/lib/broth/socket_handler.ex\nindex d142135..5828f30 100644\n--- a/kousa/lib/broth/socket_handler.ex\n+++ b/kousa/lib/broth/socket_handler.ex\n@@ -22,7 +22,7 @@ defmodule Broth.SocketHandler do\n ## initialization boilerplate\n \n @impl true\n- def init(request = %{peer: {ip, _reverse_port}}, _state) do\n+ def init(request, _state) do\n props = :cowboy_req.parse_qs(request)\n \n compression =\n@@ -37,10 +37,16 @@ defmodule Broth.SocketHandler do\n _ -> :json\n end\n \n+ ip =\n+ case request.headers do\n+ %{\"x-forwarded-for\" => v} -> v\n+ _ -> nil\n+ end\n+\n state = %__MODULE__{\n awaiting_init: true,\n user_id: nil,\n- ip: IP.to_string(ip),\n+ ip: ip,\n encoding: encoding,\n compression: compression,\n callers: get_callers(request)\ndiff --git a/kousa/test/_support/ws_client.ex b/kousa/test/_support/ws_client.ex\nindex aeca704..125da17 100644\n--- a/kousa/test/_support/ws_client.ex\n+++ b/kousa/test/_support/ws_client.ex\n@@ -19,7 +19,9 @@ defmodule BrothTest.WsClient do\n \n @api_url\n |> Path.join(\"socket\")\n- |> WebSockex.start_link(__MODULE__, nil, extra_headers: [{\"user-agent\", ancestors}])\n+ |> WebSockex.start_link(__MODULE__, nil,\n+ extra_headers: [{\"user-agent\", ancestors}, {\"x-forwarded-for\", \"127.0.0.1\"}]\n+ )\n end\n \n ###########################################################################\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\nindex 0854323..bfc7b7e 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n@@ -47,7 +47,8 @@ final class DiskSpaceRecoveryIT {\n .withZeebeData(volume)\n .withEnv(\"ZEEBE_BROKER_DATA_LOGSEGMENTSIZE\", \"1MB\")\n .withEnv(\"ZEEBE_BROKER_NETWORK_MAXMESSAGESIZE\", \"1MB\")\n- .withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.5\");\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"10MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"1MB\");\n \n private ZeebeClient client;\n \n@@ -127,7 +128,9 @@ final class DiskSpaceRecoveryIT {\n ContainerEngine.builder()\n .withDebugReceiverPort(SocketUtil.getNextAddress().getPort())\n .withContainer(\n- container.withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.0001\"))\n+ container\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"16MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"10MB\"))\n .build();\n \n @BeforeEach\n", "diff --git a/engine.go b/engine.go\nindex 6cc1ff3..4617ceb 100644\n--- a/engine.go\n+++ b/engine.go\n@@ -67,6 +67,9 @@ func (e *engine) renderText(text string) {\n \tprefix := e.activeSegment.getValue(Prefix, \" \")\n \tpostfix := e.activeSegment.getValue(Postfix, \" \")\n \te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"%s%s%s\", prefix, text, postfix))\n+\tif 
*e.env.getArgs().Debug {\n+\t\te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"(%s:%s)\", e.activeSegment.Type, e.activeSegment.timing))\n+\t}\n }\n \n func (e *engine) renderSegmentText(text string) {\n@@ -107,13 +110,11 @@ func (e *engine) setStringValues(segments []*Segment) {\n \twg.Add(len(segments))\n \tdefer wg.Wait()\n \tcwd := e.env.getcwd()\n+\tdebug := *e.env.getArgs().Debug\n \tfor _, segment := range segments {\n \t\tgo func(s *Segment) {\n \t\t\tdefer wg.Done()\n-\t\t\terr := s.mapSegmentWithWriter(e.env)\n-\t\t\tif err == nil && !s.hasValue(IgnoreFolders, cwd) && s.enabled() {\n-\t\t\t\ts.stringValue = s.string()\n-\t\t\t}\n+\t\t\ts.setStringValue(e.env, cwd, debug)\n \t\t}(segment)\n \t}\n }\ndiff --git a/main.go b/main.go\nindex 56ae8a5..d67a640 100644\n--- a/main.go\n+++ b/main.go\n@@ -14,6 +14,7 @@ type args struct {\n \tConfig *string\n \tShell *string\n \tPWD *string\n+\tDebug *bool\n }\n \n func main() {\n@@ -42,6 +43,10 @@ func main() {\n \t\t\t\"pwd\",\n \t\t\t\"\",\n \t\t\t\"the path you are working in\"),\n+\t\tDebug: flag.Bool(\n+\t\t\t\"debug\",\n+\t\t\tfalse,\n+\t\t\t\"Print debug information\"),\n \t}\n \tflag.Parse()\n \tenv := &environment{\ndiff --git a/packages/powershell/oh-my-posh/oh-my-posh.psm1 b/packages/powershell/oh-my-posh/oh-my-posh.psm1\nindex 9234fc6..1450eb3 100644\n--- a/packages/powershell/oh-my-posh/oh-my-posh.psm1\n+++ b/packages/powershell/oh-my-posh/oh-my-posh.psm1\n@@ -5,6 +5,7 @@\n \n $global:PoshSettings = New-Object -TypeName PSObject -Property @{\n Theme = \"$PSScriptRoot\\themes\\jandedobbeleer.json\";\n+ ShowDebug = $false\n }\n \n function Get-PoshCommand {\n@@ -36,9 +37,14 @@ function Set-PoshPrompt {\n param(\n [Parameter(Mandatory = $false)]\n [string]\n- $Theme\n+ $Theme,\n+ [Parameter(Mandatory = $false)]\n+ [bool]\n+ $ShowDebug = $false\n )\n \n+ $global:PoshSettings.ShowDebug = $ShowDebug\n+\n if (Test-Path \"$PSScriptRoot/themes/$Theme.json\") {\n $global:PoshSettings.Theme = \"$PSScriptRoot/themes/$Theme.json\"\n }\n@@ -68,8 +74,9 @@ function Set-PoshPrompt {\n $startInfo = New-Object System.Diagnostics.ProcessStartInfo\n $startInfo.FileName = Get-PoshCommand\n $config = $global:PoshSettings.Theme\n+ $showDebug = $global:PoshSettings.ShowDebug\n $cleanPWD = $PWD.ProviderPath.TrimEnd(\"\\\")\n- $startInfo.Arguments = \"-config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n+ $startInfo.Arguments = \"-debug=\"\"$showDebug\"\" -config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n $startInfo.Environment[\"TERM\"] = \"xterm-256color\"\n $startInfo.CreateNoWindow = $true\n $startInfo.StandardOutputEncoding = [System.Text.Encoding]::UTF8\ndiff --git a/segment.go b/segment.go\nindex 27dd416..4015dac 100644\n--- a/segment.go\n+++ b/segment.go\n@@ -1,6 +1,9 @@\n package main\n \n-import \"errors\"\n+import (\n+\t\"errors\"\n+\t\"time\"\n+)\n \n // Segment represent a single segment and it's configuration\n type Segment struct {\n@@ -17,6 +20,7 @@ type Segment struct {\n \twriter SegmentWriter\n \tstringValue string\n \tactive bool\n+\ttiming time.Duration\n }\n \n // SegmentWriter is the interface used to define what and if to write to the prompt\n@@ -149,3 +153,26 @@ func (segment *Segment) mapSegmentWithWriter(env environmentInfo) error {\n \t}\n \treturn errors.New(\"unable to map writer\")\n }\n+\n+func (segment *Segment) setStringValue(env environmentInfo, cwd string, debug bool) {\n+\terr := segment.mapSegmentWithWriter(env)\n+\tif err != nil || 
segment.hasValue(IgnoreFolders, cwd) {\n+\t\treturn\n+\t}\n+\t// add timing only in debug\n+\tif debug {\n+\t\tstart := time.Now()\n+\t\tdefer (func() {\n+\t\t\t// force segment rendering to display the time it took\n+\t\t\t// to check if the segment is enabled or not\n+\t\t\t// depending on the segement, calling enabled()\n+\t\t\t// can be time consuming\n+\t\t\tsegment.active = true\n+\t\t\telapsed := time.Since(start)\n+\t\t\tsegment.timing = elapsed\n+\t\t})()\n+\t}\n+\tif segment.enabled() {\n+\t\tsegment.stringValue = segment.string()\n+\t}\n+}\n", "diff --git a/src/build/arg_group.rs b/src/build/arg_group.rs\nindex 5201e97..e1b1991 100644\n--- a/src/build/arg_group.rs\n+++ b/src/build/arg_group.rs\n@@ -43,7 +43,7 @@ use crate::util::{Id, Key};\n /// .arg(\"--minor 'auto increase minor'\")\n /// .arg(\"--patch 'auto increase patch'\")\n /// .group(ArgGroup::with_name(\"vers\")\n-/// .args(&[\"set-ver\", \"major\", \"minor\",\"patch\"])\n+/// .args(&[\"set-ver\", \"major\", \"minor\", \"patch\"])\n /// .required(true))\n /// .try_get_matches_from(vec![\"app\", \"--major\", \"--patch\"]);\n /// // Because we used two args in the group it's an error\n"]
5
["9ab4c61975e073e214646443d088339cfdbaa88d", "2f5718743a830d40ddf272ad46f253dbb6d08cff", "672cd2b9775fb6dac2d522cb3f4469db47c0556b", "bea32587586ca08f390c901a95e9b9c25263f4df", "9849430b11b92ae58d94cfe4d0b06313c7eab550"]
["build", "fix", "test", "feat", "docs"]
add `to_sql` Co-authored-by: Gil Forsyth <[email protected]>,Introduce timediff fn (stub),Fix windows build,dashboard: removed unused code,switch QA to new testbench-1.x-prod. In order to use the new Testbench that is compatible with Zeebe 1.x versions, this switches the client id and secrets used by the QA stage.
["diff --git a/docs/api/expressions/top_level.md b/docs/api/expressions/top_level.md\nindex efaffbd..34b529e 100644\n--- a/docs/api/expressions/top_level.md\n+++ b/docs/api/expressions/top_level.md\n@@ -28,7 +28,7 @@ These methods and objects are available directly in the `ibis` module.\n ::: ibis.or_\n ::: ibis.param\n ::: ibis.show_sql\n-::: ibis.sql\n+::: ibis.to_sql\n ::: ibis.random\n ::: ibis.range_window\n ::: ibis.row_number\n", "diff --git a/rust/Cargo.lock b/rust/Cargo.lock\nindex b42616f..4795eb6 100644\n--- a/rust/Cargo.lock\n+++ b/rust/Cargo.lock\n@@ -1287,7 +1287,7 @@ dependencies = [\n [[package]]\n name = \"datafusion\"\n version = \"5.1.0\"\n-source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=8df4132b83d896a0d3db5c82a4eaaa3eaa285d15#8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\"\n+source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=868f3c4de13d13cda84cee33475b9782b94fa60c#868f3c4de13d13cda84cee33475b9782b94fa60c\"\n dependencies = [\n \"ahash 0.7.4\",\n \"arrow 6.0.0\",\ndiff --git a/rust/cubesql/Cargo.toml b/rust/cubesql/Cargo.toml\nindex 3cb386a..9aef494 100644\n--- a/rust/cubesql/Cargo.toml\n+++ b/rust/cubesql/Cargo.toml\n@@ -9,7 +9,7 @@ documentation = \"https://cube.dev/docs\"\n homepage = \"https://cube.dev\"\n \n [dependencies]\n-datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\", default-features = false, features = [\"unicode_expressions\"] }\n+datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"868f3c4de13d13cda84cee33475b9782b94fa60c\", default-features = false, features = [\"unicode_expressions\"] }\n anyhow = \"1.0\"\n thiserror = \"1.0\"\n cubeclient = { path = \"../cubeclient\" }\ndiff --git a/rust/cubesql/src/compile/engine/df/intervals.rs b/rust/cubesql/src/compile/engine/df/intervals.rs\nnew file mode 100644\nindex 0000000..9e6cb7e\n--- /dev/null\n+++ b/rust/cubesql/src/compile/engine/df/intervals.rs\n@@ -0,0 +1,51 @@\n+#[macro_export]\n+macro_rules! make_string_interval_year_month {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let interval = $array.value($row) as f64;\n+ let years = (interval / 12_f64).floor();\n+ let month = interval - (years * 12_f64);\n+\n+ format!(\n+ \"{} years {} mons 0 days 0 hours 0 mins 0.00 secs\",\n+ years, month,\n+ )\n+ };\n+\n+ s\n+ }};\n+}\n+\n+#[macro_export]\n+macro_rules! 
make_string_interval_day_time {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let value: u64 = $array.value($row) as u64;\n+\n+ let days_parts: i32 = ((value & 0xFFFFFFFF00000000) >> 32) as i32;\n+ let milliseconds_part: i32 = (value & 0xFFFFFFFF) as i32;\n+\n+ let secs = milliseconds_part / 1000;\n+ let mins = secs / 60;\n+ let hours = mins / 60;\n+\n+ let secs = secs - (mins * 60);\n+ let mins = mins - (hours * 60);\n+\n+ format!(\n+ \"0 years 0 mons {} days {} hours {} mins {}.{:02} secs\",\n+ days_parts,\n+ hours,\n+ mins,\n+ secs,\n+ (milliseconds_part % 1000),\n+ )\n+ };\n+\n+ s\n+ }};\n+}\ndiff --git a/rust/cubesql/src/compile/engine/df/mod.rs b/rust/cubesql/src/compile/engine/df/mod.rs\nindex a19a970..3097523 100644\n--- a/rust/cubesql/src/compile/engine/df/mod.rs\n+++ b/rust/cubesql/src/compile/engine/df/mod.rs\n@@ -1 +1,2 @@\n pub mod coerce;\n+pub mod intervals;\ndiff --git a/rust/cubesql/src/compile/engine/udf.rs b/rust/cubesql/src/compile/engine/udf.rs\nindex 55b8bc1..0e160b3 100644\n--- a/rust/cubesql/src/compile/engine/udf.rs\n+++ b/rust/cubesql/src/compile/engine/udf.rs\n@@ -1,14 +1,19 @@\n use std::any::type_name;\n use std::sync::Arc;\n \n+\n use datafusion::{\n arrow::{\n array::{\n ArrayRef, BooleanArray, BooleanBuilder, GenericStringArray, Int32Builder,\n- PrimitiveArray, StringBuilder, UInt32Builder,\n+ IntervalDayTimeBuilder, PrimitiveArray, StringBuilder,\n+ UInt32Builder,\n },\n compute::cast,\n- datatypes::{DataType, Int64Type},\n+ datatypes::{\n+ DataType, Int64Type, IntervalUnit, TimeUnit,\n+ TimestampNanosecondType,\n+ },\n },\n error::DataFusionError,\n logical_plan::create_udf,\n@@ -399,3 +404,63 @@ pub fn create_convert_tz_udf() -> ScalarUDF {\n &fun,\n )\n }\n+\n+pub fn create_timediff_udf() -> ScalarUDF {\n+ let fun = make_scalar_function(move |args: &[ArrayRef]| {\n+ assert!(args.len() == 2);\n+\n+ let left_dt = &args[0];\n+ let right_dt = &args[1];\n+\n+ let left_date = match left_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(left_dt, \"left_dt\", TimestampNanosecondType);\n+ let ts = arr.value(0);\n+\n+ // NaiveDateTime::from_timestamp(ts, 0)\n+ ts\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"left_dt argument must be a Timestamp, actual: {}\",\n+ left_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let right_date = match right_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(right_dt, \"right_dt\", TimestampNanosecondType);\n+ arr.value(0)\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"right_dt argument must be a Timestamp, actual: {}\",\n+ right_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let diff = right_date - left_date;\n+ if diff != 0 {\n+ return Err(DataFusionError::NotImplemented(format!(\n+ \"timediff is not implemented, it's stub\"\n+ )));\n+ }\n+\n+ let mut interal_arr = IntervalDayTimeBuilder::new(1);\n+ interal_arr.append_value(diff)?;\n+\n+ Ok(Arc::new(interal_arr.finish()) as ArrayRef)\n+ });\n+\n+ let return_type: ReturnTypeFunction =\n+ Arc::new(move |_| Ok(Arc::new(DataType::Interval(IntervalUnit::DayTime))));\n+\n+ ScalarUDF::new(\n+ \"timediff\",\n+ &Signature::any(2, Volatility::Immutable),\n+ &return_type,\n+ &fun,\n+ )\n+}\ndiff --git a/rust/cubesql/src/compile/mod.rs b/rust/cubesql/src/compile/mod.rs\nindex a88da57..6121aa0 100644\n--- a/rust/cubesql/src/compile/mod.rs\n+++ b/rust/cubesql/src/compile/mod.rs\n@@ 
-32,8 +32,8 @@ use self::engine::context::SystemVar;\n use self::engine::provider::CubeContext;\n use self::engine::udf::{\n create_connection_id_udf, create_convert_tz_udf, create_current_user_udf, create_db_udf,\n- create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_user_udf,\n- create_version_udf,\n+ create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_timediff_udf,\n+ create_user_udf, create_version_udf,\n };\n use self::parser::parse_sql_to_statement;\n \n@@ -1450,6 +1450,7 @@ impl QueryPlanner {\n ctx.register_udf(create_if_udf());\n ctx.register_udf(create_least_udf());\n ctx.register_udf(create_convert_tz_udf());\n+ ctx.register_udf(create_timediff_udf());\n \n let state = ctx.state.lock().unwrap().clone();\n let cube_ctx = CubeContext::new(&state, &self.context.cubes);\n@@ -3226,6 +3227,25 @@ mod tests {\n }\n \n #[tokio::test]\n+ async fn test_timediff() -> Result<(), CubeError> {\n+ assert_eq!(\n+ execute_df_query(\n+ \"select \\\n+ timediff('1994-11-26T13:25:00.000Z'::timestamp, '1994-11-26T13:25:00.000Z'::timestamp) as r1\n+ \".to_string()\n+ )\n+ .await?,\n+ \"+------------------------------------------------+\\n\\\n+ | r1 |\\n\\\n+ +------------------------------------------------+\\n\\\n+ | 0 years 0 mons 0 days 0 hours 0 mins 0.00 secs |\\n\\\n+ +------------------------------------------------+\"\n+ );\n+\n+ Ok(())\n+ }\n+\n+ #[tokio::test]\n async fn test_metabase() -> Result<(), CubeError> {\n assert_eq!(\n execute_df_query(\ndiff --git a/rust/cubesql/src/mysql/dataframe.rs b/rust/cubesql/src/mysql/dataframe.rs\nindex fa246aa..2443458 100644\n--- a/rust/cubesql/src/mysql/dataframe.rs\n+++ b/rust/cubesql/src/mysql/dataframe.rs\n@@ -3,9 +3,10 @@ use std::fmt::{self, Debug, Formatter};\n use chrono::{SecondsFormat, TimeZone, Utc};\n use comfy_table::{Cell, Table};\n use datafusion::arrow::array::{\n- Array, Float64Array, Int32Array, Int64Array, StringArray, TimestampMicrosecondArray,\n- UInt32Array,\n+ Array, Float64Array, Int32Array, Int64Array, IntervalDayTimeArray, IntervalYearMonthArray,\n+ StringArray, TimestampMicrosecondArray, UInt32Array,\n };\n+use datafusion::arrow::datatypes::IntervalUnit;\n use datafusion::arrow::{\n array::{BooleanArray, TimestampNanosecondArray, UInt64Array},\n datatypes::{DataType, TimeUnit},\n@@ -15,6 +16,7 @@ use log::{error, warn};\n use msql_srv::{ColumnFlags, ColumnType};\n \n use crate::{compile::builder::CompiledQueryFieldMeta, CubeError};\n+use crate::{make_string_interval_day_time, make_string_interval_year_month};\n \n #[derive(Clone, Debug)]\n pub struct Column {\n@@ -309,6 +311,7 @@ pub fn arrow_to_column_type(arrow_type: DataType) -> Result<ColumnType, CubeErro\n DataType::Binary => Ok(ColumnType::MYSQL_TYPE_BLOB),\n DataType::Utf8 | DataType::LargeUtf8 => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Timestamp(_, _) => Ok(ColumnType::MYSQL_TYPE_STRING),\n+ DataType::Interval(_) => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Float16 | DataType::Float64 => Ok(ColumnType::MYSQL_TYPE_DOUBLE),\n DataType::Boolean => Ok(ColumnType::MYSQL_TYPE_TINY),\n DataType::Int8\n@@ -402,6 +405,24 @@ pub fn batch_to_dataframe(batches: &Vec<RecordBatch>) -> Result<DataFrame, CubeE\n });\n }\n }\n+ DataType::Interval(IntervalUnit::DayTime) => {\n+ let a = array\n+ .as_any()\n+ .downcast_ref::<IntervalDayTimeArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_day_time!(a, i)));\n+ }\n+ }\n+ DataType::Interval(IntervalUnit::YearMonth) => {\n+ let 
a = array\n+ .as_any()\n+ .downcast_ref::<IntervalYearMonthArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_year_month!(a, i)));\n+ }\n+ }\n DataType::Boolean => {\n let a = array.as_any().downcast_ref::<BooleanArray>().unwrap();\n for i in 0..num_rows {\n", "diff --git a/src/fs/mounts/mod.rs b/src/fs/mounts/mod.rs\nindex a7f8188..662e2f5 100644\n--- a/src/fs/mounts/mod.rs\n+++ b/src/fs/mounts/mod.rs\n@@ -29,11 +29,14 @@ impl std::error::Error for Error {}\n \n impl std::fmt::Display for Error {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n+ // Allow unreachable_patterns for windows build\n+ #[allow(unreachable_patterns)]\n match self {\n #[cfg(target_os = \"macos\")]\n Error::GetFSStatError(err) => write!(f, \"getfsstat failed: {err}\"),\n #[cfg(target_os = \"linux\")]\n- Error::IOError(err) => write!(f, \"failed to read /proc/mounts: {err}\")\n+ Error::IOError(err) => write!(f, \"failed to read /proc/mounts: {err}\"),\n+ _ => write!(f, \"Unknown error\"),\n }\n }\n }\n\\ No newline at end of file\ndiff --git a/src/main.rs b/src/main.rs\nindex 483e14d..ca28081 100644\n--- a/src/main.rs\n+++ b/src/main.rs\n@@ -62,6 +62,8 @@ mod theme;\n // to `clap` is complete.\n lazy_static! {\n static ref ALL_MOUNTS: HashMap<PathBuf, mounts::MountedFs> = {\n+ // Allow unused_mut for windows\n+ #[allow(unused_mut)]\n let mut mount_map: HashMap<PathBuf, mounts::MountedFs> = HashMap::new();\n \n #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n", "diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py\nindex e5bc800..1afa105 100644\n--- a/api/chalicelib/core/dashboard.py\n+++ b/api/chalicelib/core/dashboard.py\n@@ -126,13 +126,6 @@ SESSIONS_META_FIELDS = {\"revId\": \"rev_id\",\n \"browser\": \"user_browser\"}\n \n \n-def __get_domains_errors_neutral(rows):\n- neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]}\n- if len(neutral.keys()) == 0:\n- neutral = {\"All\": 0}\n- return neutral\n-\n-\n def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),\n endTimestamp=TimeUTC.now(),\n density=7, **args):\n", "diff --git a/Jenkinsfile b/Jenkinsfile\nindex 176ab58..bead402 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -326,7 +326,7 @@ pipeline {\n TAG = \"${env.VERSION}-${env.GIT_COMMIT}\"\n DOCKER_GCR = credentials(\"zeebe-gcr-serviceaccount-json\")\n ZEEBE_AUTHORIZATION_SERVER_URL = 'https://login.cloud.ultrawombat.com/oauth/token'\n- ZEEBE_CLIENT_ID = 'W5a4JUc3I1NIetNnodo3YTvdsRIFb12w'\n+ ZEEBE_CLIENT_ID = 'ELL8eP0qDkl6dxXVps0t51x2VkCkWf~p'\n QA_RUN_VARIABLES = \"{\\\"zeebeImage\\\": \\\"${env.IMAGE}:${env.TAG}\\\", \\\"generationTemplate\\\": \\\"${params.GENERATION_TEMPLATE}\\\", \" +\n \"\\\"channel\\\": \\\"Internal Dev\\\", \\\"branch\\\": \\\"${env.BRANCH_NAME}\\\", \\\"build\\\": \\\"${currentBuild.absoluteUrl}\\\", \" +\n \"\\\"businessKey\\\": \\\"${currentBuild.absoluteUrl}\\\", \\\"processId\\\": \\\"qa-protocol\\\"}\"\n@@ -341,7 +341,7 @@ pipeline {\n withVault(\n [vaultSecrets:\n [\n- [path : 'secret/common/ci-zeebe/testbench-secrets-int',\n+ [path : 'secret/common/ci-zeebe/testbench-secrets-1.x-prod',\n secretValues:\n [\n [envVar: 'ZEEBE_CLIENT_SECRET', vaultKey: 'clientSecret'],\n"]
5
["e2821a56c7d867b8b591f1777019843a2ffca797", "29dfb9716298c5a579c0ffba6742e13a29325670", "81ca000c6a7e7435809081c60be37dda23458ec8", "b7b1d2b315443e1854403c8fe8f871c4632b5d31", "c81a0c2999454c859b4bf4da5779712960d239be"]
["docs", "feat", "build", "refactor", "ci"]
allow users to share their playground session,switch QA to new testbench-1.x-prod. In order to use the new Testbench that is compatible with Zeebe 1.x versions, this switches the client id and secrets used by the QA stage.,verify the replay mode: write a test to verify the different replay modes,dashboard: removed unused code,101: fix import key cmd Signed-off-by: Sam Alba <[email protected]>
["diff --git a/playground/docker-compose.yml b/playground/docker-compose.yml\nnew file mode 100644\nindex 0000000..b8ac6aa\n--- /dev/null\n+++ b/playground/docker-compose.yml\n@@ -0,0 +1,18 @@\n+version: '3.3'\n+\n+services:\n+ db:\n+ container_name: panda-mysql\n+ image: mariadb:10.7.1-focal\n+ restart: always\n+ ports:\n+ - 3310:3306\n+ environment:\n+ MARIADB_ROOT_PASSWORD: root\n+ MARIADB_DATABASE: panda\n+ volumes:\n+ - panda-mysql:/var/lib/mysql\n+\n+volumes:\n+ panda-mysql:\n+ driver: local\ndiff --git a/playground/package.json b/playground/package.json\nindex eab6f62..0feccbb 100644\n--- a/playground/package.json\n+++ b/playground/package.json\n@@ -9,6 +9,9 @@\n \"start\": \"next start\",\n \"lint\": \"next lint\",\n \"dev\": \"next dev\",\n+ \"db:start\": \"docker-compose up -d\",\n+ \"db:stop\": \"docker-compose down\",\n+ \"db:push\": \"prisma db push --skip-generate\",\n \"db:generate\": \"prisma generate\",\n \"db:reset\": \"prisma migrate reset\",\n \"db:studio\": \"prisma studio\"\ndiff --git a/playground/prisma/dev.db b/playground/prisma/dev.db\ndeleted file mode 100644\nindex aa8281f..0000000\nBinary files a/playground/prisma/dev.db and /dev/null differ\ndiff --git a/playground/prisma/migrations/20230204163131_init/migration.sql b/playground/prisma/migrations/20230204163131_init/migration.sql\ndeleted file mode 100644\nindex b3c34f7..0000000\n--- a/playground/prisma/migrations/20230204163131_init/migration.sql\n+++ /dev/null\n@@ -1,8 +0,0 @@\n--- CreateTable\n-CREATE TABLE \"Session\" (\n- \"id\" TEXT NOT NULL PRIMARY KEY,\n- \"code\" TEXT NOT NULL,\n- \"config\" TEXT NOT NULL,\n- \"view\" TEXT NOT NULL DEFAULT 'code',\n- \"createdAt\" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP\n-);\ndiff --git a/playground/prisma/migrations/20230208183556_/migration.sql b/playground/prisma/migrations/20230208183556_/migration.sql\ndeleted file mode 100644\nindex 619fd84..0000000\n--- a/playground/prisma/migrations/20230208183556_/migration.sql\n+++ /dev/null\n@@ -1,20 +0,0 @@\n-/*\n- Warnings:\n-\n- - You are about to drop the column `config` on the `Session` table. 
All the data in the column will be lost.\n-\n-*/\n--- RedefineTables\n-PRAGMA foreign_keys=OFF;\n-CREATE TABLE \"new_Session\" (\n- \"id\" TEXT NOT NULL PRIMARY KEY,\n- \"code\" TEXT NOT NULL,\n- \"theme\" TEXT NOT NULL DEFAULT '',\n- \"view\" TEXT NOT NULL DEFAULT 'code',\n- \"createdAt\" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP\n-);\n-INSERT INTO \"new_Session\" (\"code\", \"createdAt\", \"id\", \"view\") SELECT \"code\", \"createdAt\", \"id\", \"view\" FROM \"Session\";\n-DROP TABLE \"Session\";\n-ALTER TABLE \"new_Session\" RENAME TO \"Session\";\n-PRAGMA foreign_key_check;\n-PRAGMA foreign_keys=ON;\ndiff --git a/playground/prisma/migrations/20230529181831_init/migration.sql b/playground/prisma/migrations/20230529181831_init/migration.sql\nnew file mode 100644\nindex 0000000..ffe5546\n--- /dev/null\n+++ b/playground/prisma/migrations/20230529181831_init/migration.sql\n@@ -0,0 +1,9 @@\n+-- CreateTable\n+CREATE TABLE `Session` (\n+ `id` VARCHAR(191) NOT NULL,\n+ `code` TEXT NOT NULL,\n+ `theme` TEXT NOT NULL,\n+ `createdAt` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),\n+\n+ PRIMARY KEY (`id`)\n+) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;\ndiff --git a/playground/prisma/migrations/migration_lock.toml b/playground/prisma/migrations/migration_lock.toml\nindex e5e5c47..e5a788a 100644\n--- a/playground/prisma/migrations/migration_lock.toml\n+++ b/playground/prisma/migrations/migration_lock.toml\n@@ -1,3 +1,3 @@\n # Please do not edit this file manually\n # It should be added in your version-control system (i.e. Git)\n-provider = \"sqlite\"\n\\ No newline at end of file\n+provider = \"mysql\"\n\\ No newline at end of file\ndiff --git a/playground/prisma/schema.prisma b/playground/prisma/schema.prisma\nindex e84678a..9e1281e 100644\n--- a/playground/prisma/schema.prisma\n+++ b/playground/prisma/schema.prisma\n@@ -2,16 +2,14 @@ generator client {\n provider = \"prisma-client-js\"\n }\n \n-// Using SQLite for local development\n datasource db {\n- provider = \"sqlite\"\n- url = \"file:dev.db\"\n+ provider = \"mysql\"\n+ url = env(\"DATABASE_URL\")\n }\n \n model Session {\n- id String @id\n- code String\n- theme String @default(\"\")\n- view String @default(\"code\")\n+ id String @id @default(cuid())\n+ code String @db.Text\n+ theme String @db.Text\n createdAt DateTime @default(now())\n }\ndiff --git a/playground/src/app/[id]/page.tsx b/playground/src/app/[id]/page.tsx\nindex 40c21f0..a88d2b9 100644\n--- a/playground/src/app/[id]/page.tsx\n+++ b/playground/src/app/[id]/page.tsx\n@@ -6,9 +6,9 @@ const Page = async (props: any) => {\n params: { id },\n } = props\n \n- const initialState = await prisma?.session.findFirst({\n+ const initialState = await prisma.session.findFirst({\n where: { id },\n- select: { code: true, theme: true, view: true },\n+ select: { code: true, theme: true },\n })\n \n return <Playground intialState={initialState} />\ndiff --git a/playground/src/components/Editor.tsx b/playground/src/components/Editor.tsx\nindex 8263dba..e82469a 100644\n--- a/playground/src/components/Editor.tsx\n+++ b/playground/src/components/Editor.tsx\n@@ -123,10 +123,7 @@ export const Editor = (props: EditorProps) => {\n \n return (\n <Flex flex=\"1\" direction=\"column\" align=\"flex-start\">\n- <Tabs\n- defaultValue={value.view}\n- className={css({ flex: '1', width: 'full', display: 'flex', flexDirection: 'column' })}\n- >\n+ <Tabs defaultValue=\"code\" className={css({ flex: '1', width: 'full', display: 'flex', flexDirection: 'column' })}>\n <TabList\n className={css({\n 
px: '6',\ndiff --git a/playground/src/components/usePlayground.ts b/playground/src/components/usePlayground.ts\nindex 74b6069..a959fca 100644\n--- a/playground/src/components/usePlayground.ts\n+++ b/playground/src/components/usePlayground.ts\n@@ -4,7 +4,6 @@ import { Layout } from './LayoutControl'\n export type State = {\n code: string\n theme: string\n- view: string\n }\n \n export type UsePlayGroundProps = {\n@@ -51,7 +50,7 @@ export const App = () => {\n body: JSON.stringify(state),\n })\n .then((response) => response.json())\n- .then((data) => {\n+ .then(({ data }) => {\n history.pushState({ id: data.id }, '', data.id)\n setIsPristine(true)\n })\ndiff --git a/playground/src/pages/api/share.ts b/playground/src/pages/api/share.ts\nindex 23f8b9e..e6f3f26 100644\n--- a/playground/src/pages/api/share.ts\n+++ b/playground/src/pages/api/share.ts\n@@ -7,17 +7,16 @@ import { prisma } from '../../client/prisma'\n const schema = z.object({\n code: z.string(),\n theme: z.string(),\n- view: z.enum(['code', 'config']).optional(),\n })\n \n const handler = async (req: NextApiRequest, res: NextApiResponse) =>\n match(req)\n .with({ method: 'POST' }, async () => {\n try {\n- const { code, theme } = schema.parse(req.body)\n+ const data = schema.parse(req.body)\n const id = nanoid(10)\n- await prisma.session.create({ data: { id, code, theme } })\n- return res.status(200).json({ id })\n+ const session = await prisma.session.create({ data: { id, ...data }, select: { id: true } })\n+ return res.status(200).json({ success: true, data: session })\n } catch (e) {\n console.log(e)\n return res.status(500).json({ success: false })\n", "diff --git a/Jenkinsfile b/Jenkinsfile\nindex 176ab58..bead402 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -326,7 +326,7 @@ pipeline {\n TAG = \"${env.VERSION}-${env.GIT_COMMIT}\"\n DOCKER_GCR = credentials(\"zeebe-gcr-serviceaccount-json\")\n ZEEBE_AUTHORIZATION_SERVER_URL = 'https://login.cloud.ultrawombat.com/oauth/token'\n- ZEEBE_CLIENT_ID = 'W5a4JUc3I1NIetNnodo3YTvdsRIFb12w'\n+ ZEEBE_CLIENT_ID = 'ELL8eP0qDkl6dxXVps0t51x2VkCkWf~p'\n QA_RUN_VARIABLES = \"{\\\"zeebeImage\\\": \\\"${env.IMAGE}:${env.TAG}\\\", \\\"generationTemplate\\\": \\\"${params.GENERATION_TEMPLATE}\\\", \" +\n \"\\\"channel\\\": \\\"Internal Dev\\\", \\\"branch\\\": \\\"${env.BRANCH_NAME}\\\", \\\"build\\\": \\\"${currentBuild.absoluteUrl}\\\", \" +\n \"\\\"businessKey\\\": \\\"${currentBuild.absoluteUrl}\\\", \\\"processId\\\": \\\"qa-protocol\\\"}\"\n@@ -341,7 +341,7 @@ pipeline {\n withVault(\n [vaultSecrets:\n [\n- [path : 'secret/common/ci-zeebe/testbench-secrets-int',\n+ [path : 'secret/common/ci-zeebe/testbench-secrets-1.x-prod',\n secretValues:\n [\n [envVar: 'ZEEBE_CLIENT_SECRET', vaultKey: 'clientSecret'],\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java\nindex 167444c..7494014 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java\n@@ -11,6 +11,9 @@ import io.camunda.zeebe.engine.state.ZbColumnFamilies;\n import io.camunda.zeebe.engine.util.EngineRule;\n import io.camunda.zeebe.engine.util.ListLogStorage;\n import io.camunda.zeebe.model.bpmn.Bpmn;\n+import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n+import 
io.camunda.zeebe.protocol.record.value.BpmnElementType;\n+import io.camunda.zeebe.test.util.record.RecordingExporter;\n import org.assertj.core.api.SoftAssertions;\n import org.awaitility.Awaitility;\n import org.junit.Rule;\n@@ -27,16 +30,22 @@ public class ContinuouslyReplayTest {\n @Rule public final EngineRule processing = EngineRule.withSharedStorage(sharedStorage);\n \n @Test\n- public void shouldEndUpWithTheSameState() {\n+ public void shouldBuildTheSameStateOnProcessingAndReplay() {\n // given\n-\n- // when\n processing\n .deployment()\n- .withXmlResource(Bpmn.createExecutableProcess().startEvent().endEvent().done())\n+ .withXmlResource(Bpmn.createExecutableProcess(\"process\").startEvent().endEvent().done())\n .deploy();\n \n+ // when\n+ final var processInstanceKey = processing.processInstance().ofBpmnProcessId(\"process\").create();\n+\n // then\n+ RecordingExporter.processInstanceRecords(ProcessInstanceIntent.ELEMENT_COMPLETED)\n+ .withProcessInstanceKey(processInstanceKey)\n+ .withElementType(BpmnElementType.PROCESS)\n+ .await();\n+\n assertStates();\n }\n \ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\nnew file mode 100644\nindex 0000000..9dd9f4c\n--- /dev/null\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\n@@ -0,0 +1,121 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. 
You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.engine.processing.streamprocessor;\n+\n+import static io.camunda.zeebe.engine.util.RecordToWrite.command;\n+import static io.camunda.zeebe.engine.util.RecordToWrite.event;\n+import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ACTIVATE_ELEMENT;\n+import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ELEMENT_ACTIVATING;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.mockito.ArgumentMatchers.any;\n+import static org.mockito.ArgumentMatchers.anyLong;\n+import static org.mockito.ArgumentMatchers.eq;\n+import static org.mockito.Mockito.inOrder;\n+import static org.mockito.Mockito.never;\n+import static org.mockito.Mockito.timeout;\n+\n+import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor.Phase;\n+import io.camunda.zeebe.engine.state.EventApplier;\n+import io.camunda.zeebe.engine.util.Records;\n+import io.camunda.zeebe.engine.util.StreamProcessorRule;\n+import io.camunda.zeebe.protocol.impl.record.value.processinstance.ProcessInstanceRecord;\n+import io.camunda.zeebe.protocol.record.ValueType;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.mockito.InOrder;\n+import org.mockito.Mock;\n+import org.mockito.junit.MockitoJUnit;\n+import org.mockito.junit.MockitoRule;\n+import org.mockito.verification.VerificationWithTimeout;\n+\n+public final class StreamProcessorReplayModeTest {\n+\n+ private static final long TIMEOUT_MILLIS = 2_000L;\n+ private static final VerificationWithTimeout TIMEOUT = timeout(TIMEOUT_MILLIS);\n+\n+ private static final int PARTITION_ID = 1;\n+\n+ private static final ProcessInstanceRecord RECORD = Records.processInstance(1);\n+\n+ @Rule\n+ public final StreamProcessorRule replayUntilEnd =\n+ new StreamProcessorRule(PARTITION_ID).withReplayMode(ReplayMode.UNTIL_END);\n+\n+ @Rule\n+ public final StreamProcessorRule replayContinuously =\n+ new StreamProcessorRule(PARTITION_ID).withReplayMode(ReplayMode.CONTINUOUSLY);\n+\n+ @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();\n+\n+ @Mock private TypedRecordProcessor<?> typedRecordProcessor;\n+ @Mock private EventApplier eventApplier;\n+\n+ @Test\n+ public void shouldReplayUntilEnd() {\n+ // given\n+ replayUntilEnd.writeBatch(\n+ command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // when\n+ startStreamProcessor(replayUntilEnd);\n+\n+ replayUntilEnd.writeBatch(\n+ command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // then\n+ final InOrder inOrder = inOrder(typedRecordProcessor, eventApplier);\n+ inOrder.verify(eventApplier, TIMEOUT).applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n+ inOrder.verify(typedRecordProcessor, TIMEOUT.times(1)).onRecovered(any());\n+ inOrder\n+ .verify(typedRecordProcessor, TIMEOUT)\n+ .processRecord(anyLong(), any(), any(), any(), any());\n+ inOrder.verifyNoMoreInteractions();\n+\n+ assertThat(getCurrentPhase(replayUntilEnd)).isEqualTo(Phase.PROCESSING);\n+ }\n+\n+ @Test\n+ public void shouldReplayContinuously() {\n+ // given\n+ replayContinuously.writeBatch(\n+ command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // when\n+ startStreamProcessor(replayContinuously);\n+\n+ replayContinuously.writeBatch(\n+ 
command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // then\n+ final InOrder inOrder = inOrder(typedRecordProcessor, eventApplier);\n+ inOrder\n+ .verify(eventApplier, TIMEOUT.times(2))\n+ .applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n+ inOrder.verify(typedRecordProcessor, never()).onRecovered(any());\n+ inOrder.verifyNoMoreInteractions();\n+\n+ assertThat(getCurrentPhase(replayContinuously)).isEqualTo(Phase.REPROCESSING);\n+ }\n+\n+ private void startStreamProcessor(final StreamProcessorRule streamProcessorRule) {\n+ streamProcessorRule\n+ .withEventApplierFactory(zeebeState -> eventApplier)\n+ .startTypedStreamProcessor(\n+ (processors, context) ->\n+ processors.onCommand(\n+ ValueType.PROCESS_INSTANCE, ACTIVATE_ELEMENT, typedRecordProcessor));\n+ }\n+\n+ private Phase getCurrentPhase(final StreamProcessorRule streamProcessorRule) {\n+ return streamProcessorRule.getStreamProcessor(PARTITION_ID).getCurrentPhase().join();\n+ }\n+}\ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java b/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java\nindex f626ed4..cf07b5c 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java\n@@ -101,7 +101,6 @@ public final class EngineRule extends ExternalResource {\n new Int2ObjectHashMap<>();\n \n private long lastProcessedPosition = -1L;\n- private ReplayMode replayMode;\n \n private EngineRule(final int partitionCount) {\n this(partitionCount, null);\n@@ -176,7 +175,7 @@ public final class EngineRule extends ExternalResource {\n }\n \n public EngineRule withReplayMode(final ReplayMode replayMode) {\n- this.replayMode = replayMode;\n+ environmentRule.withReplayMode(replayMode);\n return this;\n }\n \n@@ -194,7 +193,6 @@ public final class EngineRule extends ExternalResource {\n (processingContext) ->\n EngineProcessors.createEngineProcessors(\n processingContext\n- .replayMode(replayMode)\n .onProcessedListener(\n record -> {\n lastProcessedPosition = record.getPosition();\ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java b/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java\nindex ab44773..1f9fe26 100755\n--- a/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java\n@@ -10,6 +10,7 @@ package io.camunda.zeebe.engine.util;\n import static io.camunda.zeebe.engine.util.StreamProcessingComposite.getLogName;\n \n import io.camunda.zeebe.db.ZeebeDbFactory;\n+import io.camunda.zeebe.engine.processing.streamprocessor.ReplayMode;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecord;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecordProcessorFactory;\n@@ -64,6 +65,7 @@ public final class StreamProcessorRule implements TestRule {\n private TestStreams streams;\n private StreamProcessingComposite streamProcessingComposite;\n private ListLogStorage sharedStorage = null;\n+ private ReplayMode replayMode = ReplayMode.UNTIL_END;\n \n public StreamProcessorRule() {\n this(new TemporaryFolder());\n@@ -125,6 +127,11 @@ public final class StreamProcessorRule implements TestRule {\n return this;\n }\n \n+ public StreamProcessorRule withReplayMode(final ReplayMode replayMode) {\n+ 
this.replayMode = replayMode;\n+ return this;\n+ }\n+\n public LogStreamRecordWriter getLogStreamRecordWriter(final int partitionId) {\n return streamProcessingComposite.getLogStreamRecordWriter(partitionId);\n }\n@@ -317,6 +324,7 @@ public final class StreamProcessorRule implements TestRule {\n @Override\n protected void before() {\n streams = new TestStreams(tempFolder, closeables, actorSchedulerRule.get());\n+ streams.withReplayMode(replayMode);\n \n int partitionId = startPartitionId;\n for (int i = 0; i < partitionCount; i++) {\ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java b/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java\nindex 18696b2..176c405 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java\n@@ -17,6 +17,7 @@ import static org.mockito.Mockito.when;\n \n import io.camunda.zeebe.db.ZeebeDb;\n import io.camunda.zeebe.db.ZeebeDbFactory;\n+import io.camunda.zeebe.engine.processing.streamprocessor.ReplayMode;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedEventRegistry;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecord;\n@@ -79,6 +80,7 @@ public final class TestStreams {\n private boolean snapshotWasTaken = false;\n \n private Function<MutableZeebeState, EventApplier> eventApplierFactory = EventAppliers::new;\n+ private ReplayMode replayMode = ReplayMode.UNTIL_END;\n \n public TestStreams(\n final TemporaryFolder dataDirectory,\n@@ -107,6 +109,10 @@ public final class TestStreams {\n this.eventApplierFactory = eventApplierFactory;\n }\n \n+ public void withReplayMode(final ReplayMode replayMode) {\n+ this.replayMode = replayMode;\n+ }\n+\n public CommandResponseWriter getMockedResponseWriter() {\n return mockCommandResponseWriter;\n }\n@@ -252,6 +258,7 @@ public final class TestStreams {\n .onProcessedListener(mockOnProcessedListener)\n .streamProcessorFactory(factory)\n .eventApplierFactory(eventApplierFactory)\n+ .replayMode(replayMode)\n .build();\n final var openFuture = streamProcessor.openAsync(false);\n \n", "diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py\nindex e5bc800..1afa105 100644\n--- a/api/chalicelib/core/dashboard.py\n+++ b/api/chalicelib/core/dashboard.py\n@@ -126,13 +126,6 @@ SESSIONS_META_FIELDS = {\"revId\": \"rev_id\",\n \"browser\": \"user_browser\"}\n \n \n-def __get_domains_errors_neutral(rows):\n- neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]}\n- if len(neutral.keys()) == 0:\n- neutral = {\"All\": 0}\n- return neutral\n-\n-\n def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),\n endTimestamp=TimeUTC.now(),\n density=7, **args):\n", "diff --git a/docs/learn/101-use.md b/docs/learn/101-use.md\nindex 283c1c1..2ec10f9 100644\n--- a/docs/learn/101-use.md\n+++ b/docs/learn/101-use.md\n@@ -41,8 +41,7 @@ cd ./examples/todoapp\n The example app contains encrypted secrets and other pre-configured inputs, here is how to decrypt them:\n \n ```sh\n-curl -sfL https://releases.dagger.io/examples/key.txt >> ~/.config/dagger/keys.txt\n-dagger input list\n+dagger input list || curl -sfL https://releases.dagger.io/examples/key.txt >> ~/.config/dagger/keys.txt\n ```\n \n **Step 4**: Deploy!\n"]
concern_count: 5
["9c2c7ea1d4935d30e014ca807a4f9cb1665b1e41", "c81a0c2999454c859b4bf4da5779712960d239be", "48d5d573886e9fdd0cca1cea47112c4a2f6edf52", "b7b1d2b315443e1854403c8fe8f871c4632b5d31", "2b01808ec86fe9d8b4a93141a1b7f95e11fd6010"]
["feat", "ci", "test", "refactor", "docs"]
also make dependents when running smoke tests,add LICENSE,create mock img server,remove unused branches and ignore envrc file,move group logical op outside Signed-off-by: Pranav C <[email protected]>
["diff --git a/.github/workflows/os-smoke-test.yml b/.github/workflows/os-smoke-test.yml\nindex 194d108..7e41493 100644\n--- a/.github/workflows/os-smoke-test.yml\n+++ b/.github/workflows/os-smoke-test.yml\n@@ -56,5 +56,7 @@ jobs:\n uses: JesseTG/[email protected]\n with:\n path: /Users/runner/.m2/repository/uk/co/real-logic/sbe-tool\n+ - name: Build relevant modules\n+ run: mvn -B -am -pl qa/integration-tests package -DskipTests -DskipChecks -T1C\n - name: Run smoke test\n run: mvn -B -pl qa/integration-tests verify -P smoke-test -DskipUTs -DskipChecks\n", "diff --git a/LICENSE b/LICENSE\nnew file mode 100644\nindex 0000000..005581d\n--- /dev/null\n+++ b/LICENSE\n@@ -0,0 +1,21 @@\n+MIT License\n+\n+Copyright (c) Hassan El Mghari\n+\n+Permission is hereby granted, free of charge, to any person obtaining a copy\n+of this software and associated documentation files (the \"Software\"), to deal\n+in the Software without restriction, including without limitation the rights\n+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n+copies of the Software, and to permit persons to whom the Software is\n+furnished to do so, subject to the following conditions:\n+\n+The above copyright notice and this permission notice shall be included in all\n+copies or substantial portions of the Software.\n+\n+THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n+SOFTWARE.\n", "diff --git a/scripts/gulp/tasks/test.ts b/scripts/gulp/tasks/test.ts\nindex 8014b12..d10c1aa 100644\n--- a/scripts/gulp/tasks/test.ts\n+++ b/scripts/gulp/tasks/test.ts\n@@ -26,12 +26,18 @@ task('test.imageserver', () => {\n function handleRequest(req, res) {\n const urlParse = url.parse(req.url, true);\n \n+ res.setHeader('Access-Control-Allow-Origin', '*');\n+ res.setHeader('Access-Control-Allow-Methods', 'GET');\n+ res.setHeader('Connection', 'keep-alive');\n+ res.setHeader('Age', '0');\n+ res.setHeader('cache-control', 'no-store');\n+\n if (urlParse.pathname === '/reset') {\n console.log('Image Server Reset');\n console.log('---------------------------');\n requestedUrls.length = 0;\n start = Date.now();\n- res.setHeader('Access-Control-Allow-Origin', '*');\n+ res.setHeader('Content-Type', 'text/plain');\n res.end('reset');\n return;\n }\n@@ -48,9 +54,8 @@ task('test.imageserver', () => {\n \n setTimeout(() => {\n res.setHeader('Content-Type', 'image/svg+xml');\n- res.setHeader('Access-Control-Allow-Origin', '*');\n res.end(`<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n- style=\"background-color: ${color}; width: ${width}px; height: ${height}px;\">\n+ viewBox=\"0 0 ${width} ${height}\" style=\"background-color: ${color};\">\n <text x=\"5\" y=\"22\" style=\"font-family: Courier; font-size: 24px\">${id}</text>\n </svg>`);\n }, delay);\n", "diff --git a/.github/workflows/ibis-backends-cloud.yml b/.github/workflows/ibis-backends-cloud.yml\nindex 2003e8e..7c7fd26 100644\n--- a/.github/workflows/ibis-backends-cloud.yml\n+++ b/.github/workflows/ibis-backends-cloud.yml\n@@ -5,9 +5,12 @@ on:\n # Skip the backend suite if all changes are in the docs directory\n paths-ignore:\n - 
\"docs/**\"\n+ - \"**/*.md\"\n+ - \"**/*.qmd\"\n+ - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n- - quarto\n \n permissions:\n # this allows extractions/setup-just to list releases for `just` at a higher\ndiff --git a/.github/workflows/ibis-backends-skip-helper.yml b/.github/workflows/ibis-backends-skip-helper.yml\nindex 5d5f3f7..0471994 100644\n--- a/.github/workflows/ibis-backends-skip-helper.yml\n+++ b/.github/workflows/ibis-backends-skip-helper.yml\n@@ -9,20 +9,20 @@ on:\n - \"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n jobs:\n test_backends:\ndiff --git a/.github/workflows/ibis-backends.yml b/.github/workflows/ibis-backends.yml\nindex 4a1cae9..30e6c1a 100644\n--- a/.github/workflows/ibis-backends.yml\n+++ b/.github/workflows/ibis-backends.yml\n@@ -8,10 +8,10 @@ on:\n - \"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n # Skip the backend suite if all changes are docs\n paths-ignore:\n@@ -19,10 +19,10 @@ on:\n - \"**/*.md\"\n - \"**/*.qmd\"\n - \"codecov.yml\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n permissions:\ndiff --git a/.github/workflows/ibis-docs-lint.yml b/.github/workflows/ibis-docs-lint.yml\nindex 1adda11..b528a30 100644\n--- a/.github/workflows/ibis-docs-lint.yml\n+++ b/.github/workflows/ibis-docs-lint.yml\n@@ -5,12 +5,10 @@ on:\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\ndiff --git a/.github/workflows/ibis-main-skip-helper.yml b/.github/workflows/ibis-main-skip-helper.yml\nindex a5fdc6f..0fb5dea 100644\n--- a/.github/workflows/ibis-main-skip-helper.yml\n+++ b/.github/workflows/ibis-main-skip-helper.yml\n@@ -8,19 +8,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n jobs:\n test_core:\ndiff --git a/.github/workflows/ibis-main.yml b/.github/workflows/ibis-main.yml\nindex aa31436..0b1536a 100644\n--- a/.github/workflows/ibis-main.yml\n+++ b/.github/workflows/ibis-main.yml\n@@ -7,20 +7,20 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n # Skip the test suite if all changes are in the docs directory\n paths-ignore:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n permissions:\ndiff --git a/.github/workflows/ibis-tpch-queries-skip-helper.yml b/.github/workflows/ibis-tpch-queries-skip-helper.yml\nindex 1f1c0bc..f10fb8d 100644\n--- a/.github/workflows/ibis-tpch-queries-skip-helper.yml\n+++ b/.github/workflows/ibis-tpch-queries-skip-helper.yml\n@@ -6,19 +6,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\ndiff --git a/.github/workflows/ibis-tpch-queries.yml 
b/.github/workflows/ibis-tpch-queries.yml\nindex b4f8a48..9e65a61 100644\n--- a/.github/workflows/ibis-tpch-queries.yml\n+++ b/.github/workflows/ibis-tpch-queries.yml\n@@ -6,19 +6,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths-ignore:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\ndiff --git a/.github/workflows/nix-skip-helper.yml b/.github/workflows/nix-skip-helper.yml\nindex 677b4d7..e0ab8f7 100644\n--- a/.github/workflows/nix-skip-helper.yml\n+++ b/.github/workflows/nix-skip-helper.yml\n@@ -9,19 +9,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n jobs:\ndiff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml\nindex f2dd3f0..7ea9e26 100644\n--- a/.github/workflows/nix.yml\n+++ b/.github/workflows/nix.yml\n@@ -6,19 +6,19 @@ on:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n pull_request:\n paths-ignore:\n - \"docs/**\"\n - \"**/*.md\"\n - \"**/*.qmd\"\n+ - \".envrc\"\n branches:\n - master\n - \"*.x.x\"\n- - quarto\n merge_group:\n \n concurrency:\n", "diff --git a/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue b/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\nindex 5138589..f756981 100644\n--- a/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\n@@ -2,40 +2,46 @@\n <div\n class=\"backgroundColor pa-2 menu-filter-dropdown\"\n :class=\"{ nested }\"\n- :style=\"{ width: nested ? '100%' : '530px' }\"\n+ :style=\"{ width: nested ? 
'100%' : '630px' }\"\n >\n <div class=\"grid\" @click.stop>\n <template v-for=\"(filter, i) in filters\" dense>\n <template v-if=\"filter.status !== 'delete'\">\n- <div v-if=\"filter.is_group\" :key=\"i\" style=\"grid-column: span 5; padding: 6px\" class=\"elevation-4\">\n- <div class=\"d-flex\" style=\"gap: 6px; padding: 0 6px\">\n- <v-icon\n- v-if=\"!filter.readOnly\"\n- small\n- class=\"nc-filter-item-remove-btn\"\n- @click.stop=\"deleteFilter(filter, i)\"\n- >\n- mdi-close-box\n- </v-icon>\n- <span v-if=\"!i\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n- <v-select\n- v-else\n- v-model=\"filter.logical_op\"\n- class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n- :items=\"['and', 'or']\"\n- solo\n- flat\n- dense\n- hide-details\n- placeholder=\"Group op\"\n- @click.stop\n- @change=\"saveOrUpdate(filter, i)\"\n- >\n- <template #item=\"{ item }\">\n- <span class=\"caption font-weight-regular\">{{ item }}</span>\n- </template>\n- </v-select>\n- </div>\n+ <template v-if=\"filter.is_group\">\n+ <v-icon\n+ v-if=\"!filter.readOnly\"\n+ small\n+ class=\"nc-filter-item-remove-btn\"\n+ @click.stop=\"deleteFilter(filter, i)\"\n+ :key=\"i + '_1'\"\n+ >\n+ mdi-close-box\n+ </v-icon>\n+ <span v-else :key=\"i + '_1'\" />\n+\n+ <span :key=\"i + '_2'\" v-if=\"!i\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n+ <v-select\n+ v-else\n+ :key=\"i + '_2'\"\n+ v-model=\"filter.logical_op\"\n+ class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n+ :items=\"['and', 'or']\"\n+ solo\n+ flat\n+ dense\n+ hide-details\n+ placeholder=\"Group op\"\n+ @click.stop\n+ @change=\"saveOrUpdate(filter, i)\"\n+ >\n+ <template #item=\"{ item }\">\n+ <span class=\"caption font-weight-regular\">{{ item }}</span>\n+ </template>\n+ </v-select>\n+ <span :key=\"i + '_3'\" style=\"grid-column: span 3\"></span>\n+ </template>\n+\n+ <div v-if=\"filter.is_group\" :key=\"i + '_4'\" style=\"grid-column: span 5; padding: 6px\" class=\"elevation-4\">\n <column-filter\n v-if=\"filter.id || shared\"\n ref=\"nestedFilter\"\n@@ -54,19 +60,19 @@\n <template v-else>\n <v-icon\n v-if=\"!filter.readOnly\"\n- :key=\"i + '_1'\"\n+ :key=\"i + '_5'\"\n small\n class=\"nc-filter-item-remove-btn\"\n @click.stop=\"deleteFilter(filter, i)\"\n >\n mdi-close-box\n </v-icon>\n- <span v-else :key=\"i + '_1'\" />\n- <span v-if=\"!i\" :key=\"i + '_2'\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n+ <span v-else :key=\"i + '_5'\" />\n+ <span v-if=\"!i\" :key=\"i + '_6'\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n \n <v-select\n v-else\n- :key=\"i + '_2'\"\n+ :key=\"i + '_6'\"\n v-model=\"filter.logical_op\"\n class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n :items=\"['and', 'or']\"\n@@ -84,7 +90,7 @@\n </v-select>\n \n <field-list-auto-complete-dropdown\n- :key=\"i + '_3'\"\n+ :key=\"i + '_7'\"\n v-model=\"filter.fk_column_id\"\n class=\"caption nc-filter-field-select\"\n :columns=\"columns\"\n@@ -94,7 +100,7 @@\n />\n \n <v-select\n- :key=\"i + '_4'\"\n+ :key=\"i + '_8'\"\n v-model=\"filter.comparison_op\"\n class=\"flex-shrink-1 flex-grow-0 caption nc-filter-operation-select\"\n :items=\"filterComparisonOp(filter)\"\n@@ -114,11 +120,11 @@\n <span class=\"caption font-weight-regular\">{{ item.text }}</span>\n </template>\n </v-select>\n- <span v-else :key=\"i + '_4'\"></span>\n+ <span v-else :key=\"i + '_8'\"></span>\n <span v-if=\"['null', 'notnull', 'empty', 'notempty'].includes(filter.comparison_op)\" :key=\"i + '_5'\" />\n 
<v-checkbox\n v-else-if=\"types[filter.field] === 'boolean'\"\n- :key=\"i + '_5'\"\n+ :key=\"i + '_9'\"\n v-model=\"filter.value\"\n dense\n :disabled=\"filter.readOnly\"\n@@ -126,7 +132,7 @@\n />\n <v-text-field\n v-else-if=\"filter && filter.fk_column_id\"\n- :key=\"i + '_5'\"\n+ :key=\"i + '_9'\"\n v-model=\"filter.value\"\n solo\n flat\n@@ -137,7 +143,7 @@\n @click.stop\n @input=\"saveOrUpdate(filter, i)\"\n />\n- <span v-else :key=\"i + '_5'\"></span>\n+ <span v-else :key=\"i + '_9'\"></span>\n </template>\n </template>\n </template>\n@@ -411,6 +417,7 @@ export default {\n parentId: this.parentId,\n is_group: true,\n status: 'update',\n+ logical_op: 'and',\n });\n this.filters = this.filters.slice();\n const index = this.filters.length - 1;\n@@ -478,4 +485,8 @@ export default {\n column-gap: 6px;\n row-gap: 6px;\n }\n+\n+.nc-filter-value-select {\n+ min-width: 100px;\n+}\n </style>\n"]
concern_count: 5
["2236b37bd671fdb71313cbc6ebd7633f0effba34", "096145f0d32a6b351b1db413b04a685952f04fb3", "32b76173a259ea1993298289b436cf10c1e800bf", "d0c6476df61b9c6ab07b87e1724ea7c5318595bb", "4f86f2570b274c45605cc59d9adb38f7ed30cd17"]
["build", "docs", "test", "ci", "refactor"]
offset tests for min and max read cursors,Port shard precreation service from InfluxDB 1.x Provides new configuration parameters: ``` --storage-shard-precreator-advance-period --storage-shard-precreator-check-interval ``` Closes #19520,Fix readme Signed-off-by: Ben Johnson <[email protected]>,get ip from forwarded header,add gitignore.nix to dep update matrix
["diff --git a/storage/reads/array_cursor_test.go b/storage/reads/array_cursor_test.go\nindex 7c7ad0c..c1e6ff9 100644\n--- a/storage/reads/array_cursor_test.go\n+++ b/storage/reads/array_cursor_test.go\n@@ -1541,6 +1541,34 @@ func TestWindowMinArrayCursor(t *testing.T) {\n \t\t\t},\n \t\t},\n \t\t{\n+\t\t\tname: \"window offset\",\n+\t\t\tevery: time.Hour,\n+\t\t\toffset: 30 * time.Minute,\n+\t\t\tinputArrays: []*cursors.IntegerArray{\n+\t\t\t\tmakeIntegerArray(\n+\t\t\t\t\t16,\n+\t\t\t\t\tmustParseTime(\"2010-01-01T00:00:00Z\"), 15*time.Minute,\n+\t\t\t\t\tfunc(i int64) int64 {\n+\t\t\t\t\t\tbase := (i / 4) * 100\n+\t\t\t\t\t\tm := (i % 4) * 15\n+\t\t\t\t\t\treturn base + m\n+\t\t\t\t\t},\n+\t\t\t\t),\n+\t\t\t},\n+\t\t\twant: []*cursors.IntegerArray{\n+\t\t\t\t{\n+\t\t\t\t\tTimestamps: []int64{\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T00:00:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T00:30:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T01:30:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T02:30:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T03:30:00Z\").UnixNano(),\n+\t\t\t\t\t},\n+\t\t\t\t\tValues: []int64{0, 30, 130, 230, 330},\n+\t\t\t\t},\n+\t\t\t},\n+\t\t},\n+\t\t{\n \t\t\tname: \"window desc values\",\n \t\t\tevery: time.Hour,\n \t\t\tinputArrays: []*cursors.IntegerArray{\n@@ -1560,6 +1588,34 @@ func TestWindowMinArrayCursor(t *testing.T) {\n \t\t\t},\n \t\t},\n \t\t{\n+\t\t\tname: \"window offset desc values\",\n+\t\t\tevery: time.Hour,\n+\t\t\toffset: 30 * time.Minute,\n+\t\t\tinputArrays: []*cursors.IntegerArray{\n+\t\t\t\tmakeIntegerArray(\n+\t\t\t\t\t16,\n+\t\t\t\t\tmustParseTime(\"2010-01-01T00:00:00Z\"), 15*time.Minute,\n+\t\t\t\t\tfunc(i int64) int64 {\n+\t\t\t\t\t\tbase := (i / 4) * 100\n+\t\t\t\t\t\tm := 60 - (i%4)*15\n+\t\t\t\t\t\treturn base + m\n+\t\t\t\t\t},\n+\t\t\t\t),\n+\t\t\t},\n+\t\t\twant: []*cursors.IntegerArray{\n+\t\t\t\t{\n+\t\t\t\t\tTimestamps: []int64{\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T00:15:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T00:45:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T01:45:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T02:45:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T03:45:00Z\").UnixNano(),\n+\t\t\t\t\t},\n+\t\t\t\t\tValues: []int64{45, 15, 115, 215, 315},\n+\t\t\t\t},\n+\t\t\t},\n+\t\t},\n+\t\t{\n \t\t\tname: \"window min int\",\n \t\t\tevery: time.Hour,\n \t\t\tinputArrays: []*cursors.IntegerArray{\n@@ -1693,6 +1749,34 @@ func TestWindowMaxArrayCursor(t *testing.T) {\n \t\t\t},\n \t\t},\n \t\t{\n+\t\t\tname: \"window offset\",\n+\t\t\tevery: time.Hour,\n+\t\t\toffset: 30 * time.Minute,\n+\t\t\tinputArrays: []*cursors.IntegerArray{\n+\t\t\t\tmakeIntegerArray(\n+\t\t\t\t\t16,\n+\t\t\t\t\tmustParseTime(\"2010-01-01T00:00:00Z\"), 15*time.Minute,\n+\t\t\t\t\tfunc(i int64) int64 {\n+\t\t\t\t\t\tbase := (i / 4) * 100\n+\t\t\t\t\t\tm := (i % 4) * 15\n+\t\t\t\t\t\treturn base + m\n+\t\t\t\t\t},\n+\t\t\t\t),\n+\t\t\t},\n+\t\t\twant: []*cursors.IntegerArray{\n+\t\t\t\t{\n+\t\t\t\t\tTimestamps: []int64{\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T00:15:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T01:15:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T02:15:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T03:15:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T03:45:00Z\").UnixNano(),\n+\t\t\t\t\t},\n+\t\t\t\t\tValues: []int64{15, 115, 215, 315, 
345},\n+\t\t\t\t},\n+\t\t\t},\n+\t\t},\n+\t\t{\n \t\t\tname: \"window desc values\",\n \t\t\tevery: time.Hour,\n \t\t\tinputArrays: []*cursors.IntegerArray{\n@@ -1712,6 +1796,34 @@ func TestWindowMaxArrayCursor(t *testing.T) {\n \t\t\t},\n \t\t},\n \t\t{\n+\t\t\tname: \"window offset desc values\",\n+\t\t\tevery: time.Hour,\n+\t\t\toffset: 30 * time.Minute,\n+\t\t\tinputArrays: []*cursors.IntegerArray{\n+\t\t\t\tmakeIntegerArray(\n+\t\t\t\t\t16,\n+\t\t\t\t\tmustParseTime(\"2010-01-01T00:00:00Z\"), 15*time.Minute,\n+\t\t\t\t\tfunc(i int64) int64 {\n+\t\t\t\t\t\tbase := (i / 4) * 100\n+\t\t\t\t\t\tm := 60 - (i%4)*15\n+\t\t\t\t\t\treturn base + m\n+\t\t\t\t\t},\n+\t\t\t\t),\n+\t\t\t},\n+\t\t\twant: []*cursors.IntegerArray{\n+\t\t\t\t{\n+\t\t\t\t\tTimestamps: []int64{\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T00:00:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T01:00:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T02:00:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T03:00:00Z\").UnixNano(),\n+\t\t\t\t\t\tmustParseTime(\"2010-01-01T03:30:00Z\").UnixNano(),\n+\t\t\t\t\t},\n+\t\t\t\t\tValues: []int64{60, 160, 260, 360, 330},\n+\t\t\t\t},\n+\t\t\t},\n+\t\t},\n+\t\t{\n \t\t\tname: \"window min int\",\n \t\t\tevery: time.Hour,\n \t\t\tinputArrays: []*cursors.IntegerArray{\n", "diff --git a/cmd/influxd/launcher/launcher.go b/cmd/influxd/launcher/launcher.go\nindex e3548ef..5559e94 100644\n--- a/cmd/influxd/launcher/launcher.go\n+++ b/cmd/influxd/launcher/launcher.go\n@@ -440,6 +440,16 @@ func launcherOpts(l *Launcher) []cli.Opt {\n \t\t\tFlag: \"storage-retention-check-interval\",\n \t\t\tDesc: \"The interval of time when retention policy enforcement checks run.\",\n \t\t},\n+\t\t{\n+\t\t\tDestP: &l.StorageConfig.PrecreatorConfig.CheckInterval,\n+\t\t\tFlag: \"storage-shard-precreator-check-interval\",\n+\t\t\tDesc: \"The interval of time when the check to pre-create new shards runs.\",\n+\t\t},\n+\t\t{\n+\t\t\tDestP: &l.StorageConfig.PrecreatorConfig.AdvancePeriod,\n+\t\t\tFlag: \"storage-shard-precreator-advance-period\",\n+\t\t\tDesc: \"The default period ahead of the endtime of a shard group that its successor group is created.\",\n+\t\t},\n \n \t\t// InfluxQL Coordinator Config\n \t\t{\ndiff --git a/storage/config.go b/storage/config.go\nindex ef953a2..d8e24db 100644\n--- a/storage/config.go\n+++ b/storage/config.go\n@@ -2,6 +2,7 @@ package storage\n \n import (\n \t\"github.com/influxdata/influxdb/v2/tsdb\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n \t\"github.com/influxdata/influxdb/v2/v1/services/retention\"\n )\n \n@@ -10,6 +11,7 @@ type Config struct {\n \tData tsdb.Config\n \n \tRetentionService retention.Config\n+\tPrecreatorConfig precreator.Config\n }\n \n // NewConfig initialises a new config for an Engine.\n@@ -17,5 +19,6 @@ func NewConfig() Config {\n \treturn Config{\n \t\tData: tsdb.NewConfig(),\n \t\tRetentionService: retention.NewConfig(),\n+\t\tPrecreatorConfig: precreator.NewConfig(),\n \t}\n }\ndiff --git a/storage/engine.go b/storage/engine.go\nindex 8518f48..ae37fdd 100644\n--- a/storage/engine.go\n+++ b/storage/engine.go\n@@ -19,6 +19,7 @@ import (\n \t_ \"github.com/influxdata/influxdb/v2/tsdb/index/tsi1\"\n \t\"github.com/influxdata/influxdb/v2/v1/coordinator\"\n \t\"github.com/influxdata/influxdb/v2/v1/services/meta\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n \t\"github.com/influxdata/influxdb/v2/v1/services/retention\"\n \t\"github.com/influxdata/influxql\"\n 
\t\"github.com/pkg/errors\"\n@@ -42,7 +43,8 @@ type Engine struct {\n \t\tWritePoints(database, retentionPolicy string, consistencyLevel models.ConsistencyLevel, user meta.User, points []models.Point) error\n \t}\n \n-\tretentionService *retention.Service\n+\tretentionService *retention.Service\n+\tprecreatorService *precreator.Service\n \n \tdefaultMetricLabels prometheus.Labels\n \n@@ -66,6 +68,7 @@ type MetaClient interface {\n \tDatabase(name string) (di *meta.DatabaseInfo)\n \tDatabases() []meta.DatabaseInfo\n \tDeleteShardGroup(database, policy string, id uint64) error\n+\tPrecreateShardGroups(now, cutoff time.Time) error\n \tPruneShardGroups() error\n \tRetentionPolicy(database, policy string) (*meta.RetentionPolicyInfo, error)\n \tShardGroupsByTimeRange(database, policy string, min, max time.Time) (a []meta.ShardGroupInfo, err error)\n@@ -115,6 +118,9 @@ func NewEngine(path string, c Config, options ...Option) *Engine {\n \te.retentionService.TSDBStore = e.tsdbStore\n \te.retentionService.MetaClient = e.metaClient\n \n+\te.precreatorService = precreator.NewService(c.PrecreatorConfig)\n+\te.precreatorService.MetaClient = e.metaClient\n+\n \treturn e\n }\n \n@@ -132,6 +138,10 @@ func (e *Engine) WithLogger(log *zap.Logger) {\n \tif e.retentionService != nil {\n \t\te.retentionService.WithLogger(log)\n \t}\n+\n+\tif e.precreatorService != nil {\n+\t\te.precreatorService.WithLogger(log)\n+\t}\n }\n \n // PrometheusCollectors returns all the prometheus collectors associated with\n@@ -161,6 +171,10 @@ func (e *Engine) Open(ctx context.Context) (err error) {\n \t\treturn err\n \t}\n \n+\tif err := e.precreatorService.Open(ctx); err != nil {\n+\t\treturn err\n+\t}\n+\n \te.closing = make(chan struct{})\n \n \treturn nil\n@@ -194,6 +208,10 @@ func (e *Engine) Close() error {\n \n \tvar retErr *multierror.Error\n \n+\tif err := e.precreatorService.Close(); err != nil {\n+\t\tretErr = multierror.Append(retErr, fmt.Errorf(\"error closing shard precreator service: %w\", err))\n+\t}\n+\n \tif err := e.retentionService.Close(); err != nil {\n \t\tretErr = multierror.Append(retErr, fmt.Errorf(\"error closing retention service: %w\", err))\n \t}\ndiff --git a/v1/services/precreator/README.md b/v1/services/precreator/README.md\nnew file mode 100644\nindex 0000000..8830b73\n--- /dev/null\n+++ b/v1/services/precreator/README.md\n@@ -0,0 +1,13 @@\n+Shard Precreation\n+============\n+\n+During normal operation when InfluxDB receives time-series data, it writes the data to files known as _shards_. Each shard only contains data for a specific range of time. Therefore, before data can be accepted by the system, the shards must exist and InfluxDB always checks that the required shards exist for every incoming data point. If the required shards do not exist, InfluxDB will create those shards. Because this requires a cluster to reach consensus, the process is not instantaneous and can temporarily impact write-throughput.\n+\n+Since almost all time-series data is written sequentially in time, the system has an excellent idea of the timestamps of future data. Shard precreation takes advantage of this fact by creating required shards ahead of time, thereby ensuring the required shards exist by the time new time-series data actually arrives. Write-throughput is therefore not affected when data is first received for a range of time that would normally trigger shard creation.\n+\n+Note that the shard-existence check must remain in place in the code, even with shard precreation. 
This is because while most data is written sequentially in time, this is not always the case. Data may be written with timestamps in the past, or farther in the future than shard precreation handles.\n+\n+## Configuration\n+Shard precreation can be disabled if necessary, though this is not recommended. If it is disabled, then shards will be only be created when explicitly needed.\n+\n+The interval between runs of the shard precreation service, as well as the time-in-advance the shards are created, are also configurable. The defaults should work for most deployments.\ndiff --git a/v1/services/precreator/config.go b/v1/services/precreator/config.go\nnew file mode 100644\nindex 0000000..5e994e6\n--- /dev/null\n+++ b/v1/services/precreator/config.go\n@@ -0,0 +1,65 @@\n+package precreator\n+\n+import (\n+\t\"errors\"\n+\t\"time\"\n+\n+\t\"github.com/influxdata/influxdb/v2/toml\"\n+\t\"github.com/influxdata/influxdb/v2/v1/monitor/diagnostics\"\n+)\n+\n+const (\n+\t// DefaultCheckInterval is the shard precreation check time if none is specified.\n+\tDefaultCheckInterval = 10 * time.Minute\n+\n+\t// DefaultAdvancePeriod is the default period ahead of the endtime of a shard group\n+\t// that its successor group is created.\n+\tDefaultAdvancePeriod = 30 * time.Minute\n+)\n+\n+// Config represents the configuration for shard precreation.\n+type Config struct {\n+\tEnabled bool `toml:\"enabled\"`\n+\tCheckInterval toml.Duration `toml:\"check-interval\"`\n+\tAdvancePeriod toml.Duration `toml:\"advance-period\"`\n+}\n+\n+// NewConfig returns a new Config with defaults.\n+func NewConfig() Config {\n+\treturn Config{\n+\t\tEnabled: true,\n+\t\tCheckInterval: toml.Duration(DefaultCheckInterval),\n+\t\tAdvancePeriod: toml.Duration(DefaultAdvancePeriod),\n+\t}\n+}\n+\n+// Validate returns an error if the Config is invalid.\n+func (c Config) Validate() error {\n+\tif !c.Enabled {\n+\t\treturn nil\n+\t}\n+\n+\tif c.CheckInterval <= 0 {\n+\t\treturn errors.New(\"check-interval must be positive\")\n+\t}\n+\tif c.AdvancePeriod <= 0 {\n+\t\treturn errors.New(\"advance-period must be positive\")\n+\t}\n+\n+\treturn nil\n+}\n+\n+// Diagnostics returns a diagnostics representation of a subset of the Config.\n+func (c Config) Diagnostics() (*diagnostics.Diagnostics, error) {\n+\tif !c.Enabled {\n+\t\treturn diagnostics.RowFromMap(map[string]interface{}{\n+\t\t\t\"enabled\": false,\n+\t\t}), nil\n+\t}\n+\n+\treturn diagnostics.RowFromMap(map[string]interface{}{\n+\t\t\"enabled\": true,\n+\t\t\"check-interval\": c.CheckInterval,\n+\t\t\"advance-period\": c.AdvancePeriod,\n+\t}), nil\n+}\ndiff --git a/v1/services/precreator/config_test.go b/v1/services/precreator/config_test.go\nnew file mode 100644\nindex 0000000..2686001\n--- /dev/null\n+++ b/v1/services/precreator/config_test.go\n@@ -0,0 +1,67 @@\n+package precreator_test\n+\n+import (\n+\t\"testing\"\n+\t\"time\"\n+\n+\t\"github.com/BurntSushi/toml\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n+)\n+\n+func TestConfig_Parse(t *testing.T) {\n+\t// Parse configuration.\n+\tvar c precreator.Config\n+\tif _, err := toml.Decode(`\n+enabled = true\n+check-interval = \"2m\"\n+advance-period = \"10m\"\n+`, &c); err != nil {\n+\n+\t\tt.Fatal(err)\n+\t}\n+\n+\t// Validate configuration.\n+\tif !c.Enabled {\n+\t\tt.Fatalf(\"unexpected enabled state: %v\", c.Enabled)\n+\t} else if time.Duration(c.CheckInterval) != 2*time.Minute {\n+\t\tt.Fatalf(\"unexpected check interval: %s\", c.CheckInterval)\n+\t} else if time.Duration(c.AdvancePeriod) != 10*time.Minute 
{\n+\t\tt.Fatalf(\"unexpected advance period: %s\", c.AdvancePeriod)\n+\t}\n+}\n+\n+func TestConfig_Validate(t *testing.T) {\n+\tc := precreator.NewConfig()\n+\tif err := c.Validate(); err != nil {\n+\t\tt.Fatalf(\"unexpected validation fail from NewConfig: %s\", err)\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.CheckInterval = 0\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for check-interval = 0, got nil\")\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.CheckInterval *= -1\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for negative check-interval, got nil\")\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.AdvancePeriod = 0\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for advance-period = 0, got nil\")\n+\t}\n+\n+\tc = precreator.NewConfig()\n+\tc.AdvancePeriod *= -1\n+\tif err := c.Validate(); err == nil {\n+\t\tt.Fatal(\"expected error for negative advance-period, got nil\")\n+\t}\n+\n+\tc.Enabled = false\n+\tif err := c.Validate(); err != nil {\n+\t\tt.Fatalf(\"unexpected validation fail from disabled config: %s\", err)\n+\t}\n+}\ndiff --git a/v1/services/precreator/service.go b/v1/services/precreator/service.go\nnew file mode 100644\nindex 0000000..28e8f16\n--- /dev/null\n+++ b/v1/services/precreator/service.go\n@@ -0,0 +1,93 @@\n+// Package precreator provides the shard precreation service.\n+package precreator // import \"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n+\n+import (\n+\t\"context\"\n+\t\"sync\"\n+\t\"time\"\n+\n+\t\"github.com/influxdata/influxdb/v2/logger\"\n+\t\"go.uber.org/zap\"\n+)\n+\n+// Service manages the shard precreation service.\n+type Service struct {\n+\tcheckInterval time.Duration\n+\tadvancePeriod time.Duration\n+\n+\tLogger *zap.Logger\n+\n+\tcancel context.CancelFunc\n+\twg sync.WaitGroup\n+\n+\tMetaClient interface {\n+\t\tPrecreateShardGroups(now, cutoff time.Time) error\n+\t}\n+}\n+\n+// NewService returns an instance of the precreation service.\n+func NewService(c Config) *Service {\n+\treturn &Service{\n+\t\tcheckInterval: time.Duration(c.CheckInterval),\n+\t\tadvancePeriod: time.Duration(c.AdvancePeriod),\n+\t\tLogger: zap.NewNop(),\n+\t}\n+}\n+\n+// WithLogger sets the logger for the service.\n+func (s *Service) WithLogger(log *zap.Logger) {\n+\ts.Logger = log.With(zap.String(\"service\", \"shard-precreation\"))\n+}\n+\n+// Open starts the precreation service.\n+func (s *Service) Open(ctx context.Context) error {\n+\tif s.cancel != nil {\n+\t\treturn nil\n+\t}\n+\n+\ts.Logger.Info(\"Starting precreation service\",\n+\t\tlogger.DurationLiteral(\"check_interval\", s.checkInterval),\n+\t\tlogger.DurationLiteral(\"advance_period\", s.advancePeriod))\n+\n+\tctx, s.cancel = context.WithCancel(ctx)\n+\n+\ts.wg.Add(1)\n+\tgo s.runPrecreation(ctx)\n+\treturn nil\n+}\n+\n+// Close stops the precreation service.\n+func (s *Service) Close() error {\n+\tif s.cancel == nil {\n+\t\treturn nil\n+\t}\n+\n+\ts.cancel()\n+\ts.wg.Wait()\n+\ts.cancel = nil\n+\n+\treturn nil\n+}\n+\n+// runPrecreation continually checks if resources need precreation.\n+func (s *Service) runPrecreation(ctx context.Context) {\n+\tdefer s.wg.Done()\n+\n+\tfor {\n+\t\tselect {\n+\t\tcase <-time.After(s.checkInterval):\n+\t\t\tif err := s.precreate(time.Now().UTC()); err != nil {\n+\t\t\t\ts.Logger.Info(\"Failed to precreate shards\", zap.Error(err))\n+\t\t\t}\n+\t\tcase <-ctx.Done():\n+\t\t\ts.Logger.Info(\"Terminating precreation service\")\n+\t\t\treturn\n+\t\t}\n+\t}\n+}\n+\n+// precreate 
performs actual resource precreation.\n+func (s *Service) precreate(now time.Time) error {\n+\tcutoff := now.Add(s.advancePeriod).UTC()\n+\treturn s.MetaClient.PrecreateShardGroups(now, cutoff)\n+}\ndiff --git a/v1/services/precreator/service_test.go b/v1/services/precreator/service_test.go\nnew file mode 100644\nindex 0000000..20289b7\n--- /dev/null\n+++ b/v1/services/precreator/service_test.go\n@@ -0,0 +1,56 @@\n+package precreator_test\n+\n+import (\n+\t\"context\"\n+\t\"os\"\n+\t\"testing\"\n+\t\"time\"\n+\n+\t\"github.com/influxdata/influxdb/v2/logger\"\n+\t\"github.com/influxdata/influxdb/v2/toml\"\n+\t\"github.com/influxdata/influxdb/v2/v1/internal\"\n+\t\"github.com/influxdata/influxdb/v2/v1/services/precreator\"\n+)\n+\n+func TestShardPrecreation(t *testing.T) {\n+\tdone := make(chan struct{})\n+\tprecreate := false\n+\n+\tvar mc internal.MetaClientMock\n+\tmc.PrecreateShardGroupsFn = func(now, cutoff time.Time) error {\n+\t\tif !precreate {\n+\t\t\tclose(done)\n+\t\t\tprecreate = true\n+\t\t}\n+\t\treturn nil\n+\t}\n+\n+\ts := NewTestService()\n+\ts.MetaClient = &mc\n+\n+\tif err := s.Open(context.Background()); err != nil {\n+\t\tt.Fatalf(\"unexpected open error: %s\", err)\n+\t}\n+\tdefer s.Close() // double close should not cause a panic\n+\n+\ttimer := time.NewTimer(100 * time.Millisecond)\n+\tselect {\n+\tcase <-done:\n+\t\ttimer.Stop()\n+\tcase <-timer.C:\n+\t\tt.Errorf(\"timeout exceeded while waiting for precreate\")\n+\t}\n+\n+\tif err := s.Close(); err != nil {\n+\t\tt.Fatalf(\"unexpected close error: %s\", err)\n+\t}\n+}\n+\n+func NewTestService() *precreator.Service {\n+\tconfig := precreator.NewConfig()\n+\tconfig.CheckInterval = toml.Duration(10 * time.Millisecond)\n+\n+\ts := precreator.NewService(config)\n+\ts.WithLogger(logger.New(os.Stderr))\n+\treturn s\n+}\n", "diff --git a/README.md b/README.md\nindex 587d655..da746bb 100644\n--- a/README.md\n+++ b/README.md\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * [**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - [docker][docs.sources.docker], [file][docs.sources.file], [journald][docs.sources.journald], [kafka][docs.sources.kafka]\n+* [**Transforms**][docs.transforms] - [json_parser][docs.transforms.json_parser], [log_to_metric][docs.transforms.log_to_metric], [lua][docs.transforms.lua], [regex_parser][docs.transforms.regex_parser]\n+* [**Sinks**][docs.sinks] - [aws_cloudwatch_logs][docs.sinks.aws_cloudwatch_logs], [aws_cloudwatch_metrics][docs.sinks.aws_cloudwatch_metrics], [aws_kinesis_streams][docs.sinks.aws_kinesis_streams], [aws_s3][docs.sinks.aws_s3], [clickhouse][docs.sinks.clickhouse], [elasticsearch][docs.sinks.elasticsearch], and [15 more][docs.sinks]\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process 
management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,88 +110,6 @@ curl --proto '=https' --tlsv1.2 -sSf https://sh.vector.dev | sh\n \n Or view [platform specific installation instructions][docs.installation].\n \n-\n-## Sources\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`docker`**][docs.sources.docker] | Ingests data through the docker engine daemon and outputs [`log`][docs.data-model#log] events. |\n-| [**`file`**][docs.sources.file] | Ingests data through one or more local files and outputs [`log`][docs.data-model#log] events. |\n-| [**`journald`**][docs.sources.journald] | Ingests data through log records from journald and outputs [`log`][docs.data-model#log] events. |\n-| [**`kafka`**][docs.sources.kafka] | Ingests data through Kafka 0.9 or later and outputs [`log`][docs.data-model#log] events. |\n-| [**`statsd`**][docs.sources.statsd] | Ingests data through the StatsD UDP protocol and outputs [`metric`][docs.data-model#metric] events. |\n-| [**`stdin`**][docs.sources.stdin] | Ingests data through standard input (STDIN) and outputs [`log`][docs.data-model#log] events. |\n-| [**`syslog`**][docs.sources.syslog] | Ingests data through the Syslog 5424 protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`tcp`**][docs.sources.tcp] | Ingests data through the TCP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`udp`**][docs.sources.udp] | Ingests data through the UDP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`vector`**][docs.sources.vector] | Ingests data through another upstream [`vector` sink][docs.sinks.vector] and outputs [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events. |\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`add_fields`**][docs.transforms.add_fields] | Accepts [`log`][docs.data-model#log] events and allows you to add one or more log fields. |\n-| [**`add_tags`**][docs.transforms.add_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to add one or more metric tags. |\n-| [**`coercer`**][docs.transforms.coercer] | Accepts [`log`][docs.data-model#log] events and allows you to coerce log fields into fixed types. |\n-| [**`field_filter`**][docs.transforms.field_filter] | Accepts [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events and allows you to filter events by a log field's value. |\n-| [**`grok_parser`**][docs.transforms.grok_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value with [Grok][urls.grok]. |\n-| [**`json_parser`**][docs.transforms.json_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value as JSON. |\n-| [**`log_to_metric`**][docs.transforms.log_to_metric] | Accepts [`log`][docs.data-model#log] events and allows you to convert logs into one or more metrics. |\n-| [**`lua`**][docs.transforms.lua] | Accepts [`log`][docs.data-model#log] events and allows you to transform events with a full embedded [Lua][urls.lua] engine. |\n-| [**`regex_parser`**][docs.transforms.regex_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field's value with a [Regular Expression][urls.regex]. 
|\n-| [**`remove_fields`**][docs.transforms.remove_fields] | Accepts [`log`][docs.data-model#log] events and allows you to remove one or more log fields. |\n-| [**`remove_tags`**][docs.transforms.remove_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to remove one or more metric tags. |\n-| [**`sampler`**][docs.transforms.sampler] | Accepts [`log`][docs.data-model#log] events and allows you to sample events with a configurable rate. |\n-| [**`split`**][docs.transforms.split] | Accepts [`log`][docs.data-model#log] events and allows you to split a field's value on a given separator and zip the tokens into ordered field names. |\n-| [**`tokenizer`**][docs.transforms.tokenizer] | Accepts [`log`][docs.data-model#log] events and allows you to tokenize a field's value by splitting on white space, ignoring special wrapping characters, and zip the tokens into ordered field names. |\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`aws_cloudwatch_logs`**][docs.sinks.aws_cloudwatch_logs] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS CloudWatch Logs][urls.aws_cw_logs] via the [`PutLogEvents` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html). |\n-| [**`aws_cloudwatch_metrics`**][docs.sinks.aws_cloudwatch_metrics] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [AWS CloudWatch Metrics][urls.aws_cw_metrics] via the [`PutMetricData` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_PutMetricData.html). |\n-| [**`aws_kinesis_streams`**][docs.sinks.aws_kinesis_streams] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS Kinesis Data Stream][urls.aws_kinesis_data_streams] via the [`PutRecords` API endpoint](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html). |\n-| [**`aws_s3`**][docs.sinks.aws_s3] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS S3][urls.aws_s3] via the [`PutObject` API endpoint](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html). |\n-| [**`blackhole`**][docs.sinks.blackhole] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to a blackhole that simply discards data, designed for testing and benchmarking purposes. |\n-| [**`clickhouse`**][docs.sinks.clickhouse] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Clickhouse][urls.clickhouse] via the [`HTTP` Interface][urls.clickhouse_http]. |\n-| [**`console`**][docs.sinks.console] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to [standard output streams][urls.standard_streams], such as `STDOUT` and `STDERR`. |\n-| [**`datadog_metrics`**][docs.sinks.datadog_metrics] | [Batches](#buffers-and-batches) [`metric`][docs.data-model#metric] events to [Datadog][urls.datadog] metrics service using [HTTP API](https://docs.datadoghq.com/api/?lang=bash#metrics). |\n-| [**`elasticsearch`**][docs.sinks.elasticsearch] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Elasticsearch][urls.elasticsearch] via the [`_bulk` API endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html). |\n-| [**`file`**][docs.sinks.file] | [Streams](#streaming) [`log`][docs.data-model#log] events to a file. 
|\n-| [**`http`**][docs.sinks.http] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a generic HTTP endpoint. |\n-| [**`kafka`**][docs.sinks.kafka] | [Streams](#streaming) [`log`][docs.data-model#log] events to [Apache Kafka][urls.kafka] via the [Kafka protocol][urls.kafka_protocol]. |\n-| [**`prometheus`**][docs.sinks.prometheus] | [Exposes](#exposing-and-scraping) [`metric`][docs.data-model#metric] events to [Prometheus][urls.prometheus] metrics service. |\n-| [**`splunk_hec`**][docs.sinks.splunk_hec] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a [Splunk HTTP Event Collector][urls.splunk_hec]. |\n-| [**`statsd`**][docs.sinks.statsd] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [StatsD][urls.statsd] metrics service. |\n-| [**`tcp`**][docs.sinks.tcp] | [Streams](#streaming) [`log`][docs.data-model#log] events to a TCP connection. |\n-| [**`vector`**][docs.sinks.vector] | [Streams](#streaming) [`log`][docs.data-model#log] events to another downstream [`vector` source][docs.sources.vector]. |\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright 2019, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\n@@ -200,8 +123,6 @@ the License.\n [docs.configuration]: https://vector.dev/docs/setup/configuration\n [docs.containers]: https://vector.dev/docs/setup/installation/containers\n [docs.correctness]: https://vector.dev/docs/about/correctness\n-[docs.data-model#log]: https://vector.dev/docs/about/data-model#log\n-[docs.data-model#metric]: https://vector.dev/docs/about/data-model#metric\n [docs.data-model.log]: https://vector.dev/docs/about/data-model/log\n [docs.data-model.metric]: https://vector.dev/docs/about/data-model/metric\n [docs.data_model]: https://vector.dev/docs/about/data-model\n@@ -209,6 +130,7 @@ the License.\n [docs.from-archives]: https://vector.dev/docs/setup/installation/manual/from-archives\n [docs.from-source]: https://vector.dev/docs/setup/installation/manual/from-source\n [docs.guarantees]: https://vector.dev/docs/about/guarantees\n+[docs.guides.getting_started]: https://vector.dev/docs/setup/guides/getting-started\n [docs.guides]: https://vector.dev/docs/setup/guides\n [docs.installation]: https://vector.dev/docs/setup/installation\n [docs.monitoring]: https://vector.dev/docs/administration/monitoring\n@@ -224,72 +146,25 @@ the License.\n [docs.sinks.aws_cloudwatch_metrics]: https://vector.dev/docs/reference/sinks/aws_cloudwatch_metrics\n [docs.sinks.aws_kinesis_streams]: https://vector.dev/docs/reference/sinks/aws_kinesis_streams\n [docs.sinks.aws_s3]: https://vector.dev/docs/reference/sinks/aws_s3\n-[docs.sinks.blackhole]: https://vector.dev/docs/reference/sinks/blackhole\n [docs.sinks.clickhouse]: https://vector.dev/docs/reference/sinks/clickhouse\n-[docs.sinks.console]: https://vector.dev/docs/reference/sinks/console\n-[docs.sinks.datadog_metrics]: https://vector.dev/docs/reference/sinks/datadog_metrics\n 
[docs.sinks.elasticsearch]: https://vector.dev/docs/reference/sinks/elasticsearch\n-[docs.sinks.file]: https://vector.dev/docs/reference/sinks/file\n-[docs.sinks.http]: https://vector.dev/docs/reference/sinks/http\n-[docs.sinks.kafka]: https://vector.dev/docs/reference/sinks/kafka\n-[docs.sinks.prometheus]: https://vector.dev/docs/reference/sinks/prometheus\n-[docs.sinks.splunk_hec]: https://vector.dev/docs/reference/sinks/splunk_hec\n-[docs.sinks.statsd]: https://vector.dev/docs/reference/sinks/statsd\n-[docs.sinks.tcp]: https://vector.dev/docs/reference/sinks/tcp\n-[docs.sinks.vector]: https://vector.dev/docs/reference/sinks/vector\n [docs.sinks]: https://vector.dev/docs/reference/sinks\n [docs.sources.docker]: https://vector.dev/docs/reference/sources/docker\n [docs.sources.file]: https://vector.dev/docs/reference/sources/file\n [docs.sources.journald]: https://vector.dev/docs/reference/sources/journald\n [docs.sources.kafka]: https://vector.dev/docs/reference/sources/kafka\n-[docs.sources.statsd]: https://vector.dev/docs/reference/sources/statsd\n-[docs.sources.stdin]: https://vector.dev/docs/reference/sources/stdin\n-[docs.sources.syslog]: https://vector.dev/docs/reference/sources/syslog\n-[docs.sources.tcp]: https://vector.dev/docs/reference/sources/tcp\n-[docs.sources.udp]: https://vector.dev/docs/reference/sources/udp\n-[docs.sources.vector]: https://vector.dev/docs/reference/sources/vector\n [docs.sources]: https://vector.dev/docs/reference/sources\n [docs.topologies]: https://vector.dev/docs/setup/deployment/topologies\n-[docs.transforms.add_fields]: https://vector.dev/docs/reference/transforms/add_fields\n-[docs.transforms.add_tags]: https://vector.dev/docs/reference/transforms/add_tags\n-[docs.transforms.coercer]: https://vector.dev/docs/reference/transforms/coercer\n-[docs.transforms.field_filter]: https://vector.dev/docs/reference/transforms/field_filter\n-[docs.transforms.grok_parser]: https://vector.dev/docs/reference/transforms/grok_parser\n [docs.transforms.json_parser]: https://vector.dev/docs/reference/transforms/json_parser\n [docs.transforms.log_to_metric]: https://vector.dev/docs/reference/transforms/log_to_metric\n [docs.transforms.lua]: https://vector.dev/docs/reference/transforms/lua\n [docs.transforms.regex_parser]: https://vector.dev/docs/reference/transforms/regex_parser\n-[docs.transforms.remove_fields]: https://vector.dev/docs/reference/transforms/remove_fields\n-[docs.transforms.remove_tags]: https://vector.dev/docs/reference/transforms/remove_tags\n-[docs.transforms.sampler]: https://vector.dev/docs/reference/transforms/sampler\n-[docs.transforms.split]: https://vector.dev/docs/reference/transforms/split\n-[docs.transforms.tokenizer]: https://vector.dev/docs/reference/transforms/tokenizer\n [docs.transforms]: https://vector.dev/docs/reference/transforms\n [docs.updating]: https://vector.dev/docs/administration/updating\n [docs.use_cases]: https://vector.dev/docs/use_cases\n [docs.validating]: https://vector.dev/docs/administration/validating\n-[urls.aws_cw_logs]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/WhatIsCloudWatchLogs.html\n-[urls.aws_cw_metrics]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/working_with_metrics.html\n-[urls.aws_kinesis_data_streams]: https://aws.amazon.com/kinesis/data-streams/\n-[urls.aws_s3]: https://aws.amazon.com/s3/\n-[urls.clickhouse]: https://clickhouse.yandex/\n-[urls.clickhouse_http]: https://clickhouse.yandex/docs/en/interfaces/http/\n-[urls.datadog]: 
https://www.datadoghq.com\n-[urls.elasticsearch]: https://www.elastic.co/products/elasticsearch\n-[urls.grok]: http://grokdebug.herokuapp.com/\n-[urls.kafka]: https://kafka.apache.org/\n-[urls.kafka_protocol]: https://kafka.apache.org/protocol\n-[urls.lua]: https://www.lua.org/\n [urls.mailing_list]: https://vector.dev/mailing_list/\n-[urls.new_sink]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_source]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_transform]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.prometheus]: https://prometheus.io/\n-[urls.regex]: https://en.wikipedia.org/wiki/Regular_expression\n [urls.rust]: https://www.rust-lang.org/\n-[urls.splunk_hec]: http://dev.splunk.com/view/event-collector/SP-CAAAE6M\n-[urls.standard_streams]: https://en.wikipedia.org/wiki/Standard_streams\n-[urls.statsd]: https://github.com/statsd/statsd\n [urls.test_harness]: https://github.com/timberio/vector-test-harness/\n [urls.v0.5.0]: https://github.com/timberio/vector/releases/tag/v0.5.0\n [urls.vector_changelog]: https://github.com/timberio/vector/blob/master/CHANGELOG.md\ndiff --git a/README.md.erb b/README.md.erb\nindex 3b14aa0..cc241eb 100644\n--- a/README.md.erb\n+++ b/README.md.erb\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * [**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - <%= common_component_links(:source) %>\n+* [**Transforms**][docs.transforms] - <%= common_component_links(:transform) %>\n+* [**Sinks**][docs.sinks] - <%= common_component_links(:sink) %>\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,44 +110,6 @@ Run the following in your terminal, then follow the on-screen instructions.\n \n Or view [platform specific installation instructions][docs.installation].\n \n-\n-## Sources\n-\n-<%= components_table(metadata.sources.to_h.values.sort) %>\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-<%= components_table(metadata.transforms.to_h.values.sort) %>\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-<%= components_table(metadata.sinks.to_h.values.sort) %>\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright <%= Time.now.year %>, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. 
You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\ndiff --git a/scripts/generate/templates.rb b/scripts/generate/templates.rb\nindex e5e7ce7..c793ae0 100644\n--- a/scripts/generate/templates.rb\n+++ b/scripts/generate/templates.rb\n@@ -89,6 +89,23 @@ class Templates\n render(\"#{partials_path}/_commit_type_toc_item.md\", binding).gsub(/,$/, \"\")\n end\n \n+ def common_component_links(type, limit = 5)\n+ common = metadata.send(\"#{type.to_s.pluralize}_list\").select(&:common?)\n+\n+ links =\n+ common[0..limit].collect do |component|\n+ \"[#{component.name}][docs.#{type.to_s.pluralize}.#{component.name}]\"\n+ end\n+\n+ num_leftover = common.size - links.size\n+\n+ if num_leftover > 0\n+ links << \"and [15 more][docs.#{type.to_s.pluralize}]\"\n+ end\n+\n+ links.join(\", \")\n+ end\n+\n def component_config_example(component)\n render(\"#{partials_path}/_component_config_example.md\", binding).strip\n end\ndiff --git a/scripts/util/metadata/component.rb b/scripts/util/metadata/component.rb\nindex 0873b2e..4dc5650 100644\n--- a/scripts/util/metadata/component.rb\n+++ b/scripts/util/metadata/component.rb\n@@ -9,6 +9,7 @@ class Component\n include Comparable\n \n attr_reader :beta,\n+ :common,\n :function_category,\n :id,\n :name,\n@@ -18,6 +19,7 @@ class Component\n \n def initialize(hash)\n @beta = hash[\"beta\"] == true\n+ @common = hash[\"common\"] == true\n @function_category = hash.fetch(\"function_category\")\n @name = hash.fetch(\"name\")\n @type ||= self.class.name.downcase\n@@ -71,6 +73,10 @@ class Component\n beta == true\n end\n \n+ def common?\n+ common == true\n+ end\n+\n def context_options\n options_list.select(&:context?)\n end\ndiff --git a/website/src/components/VectorComponents/index.js b/website/src/components/VectorComponents/index.js\nindex b6c5c13..d3c9adf 100644\n--- a/website/src/components/VectorComponents/index.js\n+++ b/website/src/components/VectorComponents/index.js\n@@ -154,7 +154,7 @@ function VectorComponents(props) {\n //\n \n const [onlyAtLeastOnce, setOnlyAtLeastOnce] = useState(queryObj['at-least-once'] == 'true');\n- const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['providers']));\n+ const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['functions']));\n const [onlyLog, setOnlyLog] = useState(queryObj['log'] == 'true');\n const [onlyMetric, setOnlyMetric] = useState(queryObj['metric'] == 'true');\n const [onlyProductionReady, setOnlyProductionReady] = useState(queryObj['prod-ready'] == 'true');\n", "diff --git a/kousa/lib/broth/socket_handler.ex b/kousa/lib/broth/socket_handler.ex\nindex d142135..5828f30 100644\n--- a/kousa/lib/broth/socket_handler.ex\n+++ b/kousa/lib/broth/socket_handler.ex\n@@ -22,7 +22,7 @@ defmodule Broth.SocketHandler do\n ## initialization boilerplate\n \n @impl true\n- def init(request = %{peer: {ip, _reverse_port}}, _state) do\n+ def init(request, _state) do\n props = :cowboy_req.parse_qs(request)\n \n compression =\n@@ -37,10 +37,16 @@ defmodule Broth.SocketHandler do\n _ -> :json\n end\n \n+ ip =\n+ case request.headers do\n+ %{\"x-forwarded-for\" => v} -> v\n+ _ -> nil\n+ end\n+\n state = %__MODULE__{\n 
awaiting_init: true,\n user_id: nil,\n- ip: IP.to_string(ip),\n+ ip: ip,\n encoding: encoding,\n compression: compression,\n callers: get_callers(request)\ndiff --git a/kousa/test/_support/ws_client.ex b/kousa/test/_support/ws_client.ex\nindex aeca704..125da17 100644\n--- a/kousa/test/_support/ws_client.ex\n+++ b/kousa/test/_support/ws_client.ex\n@@ -19,7 +19,9 @@ defmodule BrothTest.WsClient do\n \n @api_url\n |> Path.join(\"socket\")\n- |> WebSockex.start_link(__MODULE__, nil, extra_headers: [{\"user-agent\", ancestors}])\n+ |> WebSockex.start_link(__MODULE__, nil,\n+ extra_headers: [{\"user-agent\", ancestors}, {\"x-forwarded-for\", \"127.0.0.1\"}]\n+ )\n end\n \n ###########################################################################\n", "diff --git a/.github/workflows/update-deps.yml b/.github/workflows/update-deps.yml\nindex e0ccd62..1236f58 100644\n--- a/.github/workflows/update-deps.yml\n+++ b/.github/workflows/update-deps.yml\n@@ -13,6 +13,7 @@ jobs:\n - nixpkgs\n - poetry2nix\n - pre-commit-hooks\n+ - gitignore.nix\n steps:\n - name: Checkout\n uses: actions/checkout@v2\n"]
5
["b7e2330fa3a8d7b8a9bff01b707c44e64b845c7b", "6f0cf049caa1a7982669ee685e86621452686551", "662c5d1346ea2b01c0bc3c11c648cbdf92035fe2", "2f5718743a830d40ddf272ad46f253dbb6d08cff", "c444fdb9e85ce44c5c0c99addc777dd7b6085153"]
["test", "feat", "docs", "fix", "ci"]
added resize observer, this will replace window.resize if available,wire up fixed null encoding,update pr condition,add missing region to cloudformation_stack_set,lint README
["diff --git a/engine/src/Utils/EventListeners.ts b/engine/src/Utils/EventListeners.ts\nindex 9e7b189..a29cab4 100644\n--- a/engine/src/Utils/EventListeners.ts\n+++ b/engine/src/Utils/EventListeners.ts\n@@ -47,6 +47,7 @@ export class EventListeners {\n \n private canPush: boolean;\n private resizeTimeout?: NodeJS.Timeout;\n+ private resizeObserver?: ResizeObserver;\n \n /**\n * Events listener constructor\n@@ -144,7 +145,31 @@ export class EventListeners {\n }\n \n if (options.interactivity.events.resize) {\n- manageListener(window, Constants.resizeEvent, this.resizeHandler, add);\n+ if (typeof ResizeObserver !== \"undefined\") {\n+ if (this.resizeObserver && !add) {\n+ if (container.canvas.element) {\n+ this.resizeObserver.unobserve(container.canvas.element);\n+ }\n+\n+ this.resizeObserver.disconnect();\n+\n+ delete this.resizeObserver;\n+ } else if (!this.resizeObserver && add && container.canvas.element) {\n+ this.resizeObserver = new ResizeObserver((entries) => {\n+ const entry = entries.find((e) => e.target === container.canvas.element);\n+\n+ if (!entry) {\n+ return;\n+ }\n+\n+ this.handleWindowResize();\n+ });\n+\n+ this.resizeObserver.observe(container.canvas.element);\n+ }\n+ } else {\n+ manageListener(window, Constants.resizeEvent, this.resizeHandler, add);\n+ }\n }\n \n if (document) {\n", "diff --git a/read_buffer/src/row_group.rs b/read_buffer/src/row_group.rs\nindex 91c9fb5..ca77f3c 100644\n--- a/read_buffer/src/row_group.rs\n+++ b/read_buffer/src/row_group.rs\n@@ -958,24 +958,15 @@ impl From<RecordBatch> for RowGroup {\n }\n Some(InfluxColumnType::Field(_)) => {\n let column_data = match arrow_column.data_type() {\n- arrow::datatypes::DataType::Int64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::Int64Array>()\n- .unwrap(),\n- ),\n- arrow::datatypes::DataType::Float64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::Float64Array>()\n- .unwrap(),\n- ),\n- arrow::datatypes::DataType::UInt64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::UInt64Array>()\n- .unwrap(),\n- ),\n+ arrow::datatypes::DataType::Int64 => {\n+ Column::from(arrow::array::Int64Array::from(arrow_column.data()))\n+ }\n+ arrow::datatypes::DataType::Float64 => {\n+ Column::from(arrow::array::Float64Array::from(arrow_column.data()))\n+ }\n+ arrow::datatypes::DataType::UInt64 => {\n+ Column::from(arrow::array::UInt64Array::from(arrow_column.data()))\n+ }\n dt => unimplemented!(\n \"data type {:?} currently not supported for field columns\",\n dt\n", "diff --git a/.github/workflows/release-pr.yml b/.github/workflows/release-pr.yml\nindex 697ca8e..23f4475 100644\n--- a/.github/workflows/release-pr.yml\n+++ b/.github/workflows/release-pr.yml\n@@ -3,7 +3,6 @@ name: release\n on:\n issue_comment:\n types: [created]\n- contains: \"/trigger release\"\n \n env:\n # 7 GiB by default on GitHub, setting to 6 GiB\n@@ -11,6 +10,7 @@ env:\n \n jobs:\n release-pr:\n+ if: ${{ github.event.issue.pull_request && github.event.comment.body == '/trigger release' }}\n permissions:\n id-token: write\n runs-on: ubuntu-latest\n", "diff --git a/internal/providers/terraform/aws/cloudformation_stack_set.go b/internal/providers/terraform/aws/cloudformation_stack_set.go\nindex 6720caa..e752b79 100644\n--- a/internal/providers/terraform/aws/cloudformation_stack_set.go\n+++ b/internal/providers/terraform/aws/cloudformation_stack_set.go\n@@ -12,7 +12,7 @@ func getCloudFormationStackSetRegistryItem() *schema.RegistryItem {\n \t}\n }\n func 
NewCloudformationStackSet(d *schema.ResourceData, u *schema.UsageData) *schema.Resource {\n-\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address)}\n+\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address), Region: strPtr(d.Get(\"region\").String())}\n \tif !d.IsEmpty(\"template_body\") {\n \t\tr.TemplateBody = strPtr(d.Get(\"template_body\").String())\n \t}\n", "diff --git a/README.md b/README.md\nindex a163c83..9cd12bc 100644\n--- a/README.md\n+++ b/README.md\n@@ -26,7 +26,7 @@ Ibis has three primary components:\n Ibis aims to be a future-proof solution to interacting with data using Python and can accomplish this goal through its main features:\n \n - **Familiar API**: Ibis\u2019s API design borrows from popular APIs like pandas and dplyr that most users already know and like to use.\n-- **Consistent syntax**: Ibis aims to be universal Python API for tabular data, big or small.\n+- **Consistent syntax**: Ibis aims to be universal Python API for tabular data, big or small.\n - **Deferred execution**: Ibis pushes code execution to the query engine and only moves required data into memory when it has to.\n This leads to more faster, more efficient analytics workflows\n - **Interactive mode**: Ibis also provides an interactive mode, in which users can quickly diagnose problems, do exploratory data analysis, and mock up workflows locally.\n"]
5
["4197f2654e8767039dbfd66eca34f261ee3d88c8", "28b596b8834d1b51be3ac6a2ac30df28f37702d8", "f8c7b34bdeedcf1a4628cd50b23920afeaf57cb6", "304d0588f634e9e72087a706367c53af9c7f7180", "cab2cf4d3fffb0ec2b56d455e67ac5fa992b4104"]
["feat", "refactor", "ci", "fix", "docs"]
fix monorepo.dir prop Signed-off-by: Carlos Alexandro Becker <[email protected]>,use ng2 loadNextToLocation,tests,add workingDirectory option to shell.openExternal() (#15065) Allows passing `workingDirectory` to the underlying `ShellExecuteW` API on Windows. The motivation is that by default `ShellExecute` would use the current working directory, which would get locked on Windows and can prevent autoUpdater from working correctly. We need to be able to specify a different `workingDirectory` to prevent this situation.,correctly read new last flushed index
["diff --git a/www/docs/customization/monorepo.md b/www/docs/customization/monorepo.md\nindex 6d0e857..e45490f 100644\n--- a/www/docs/customization/monorepo.md\n+++ b/www/docs/customization/monorepo.md\n@@ -18,7 +18,7 @@ project_name: subproj1\n \n monorepo:\n tag_prefix: subproject1/\n- folder: subproj1\n+ dir: subproj1\n ```\n \n Then, you can release with (from the project's root directory):\n@@ -30,11 +30,11 @@ goreleaser release --rm-dist -f ./subproj1/.goreleaser.yml\n Then, the following is different from a \"regular\" run:\n \n - GoReleaser will then look if current commit has a tag prefixed with `subproject1`, and also the previous tag with the same prefix;\n-- Changelog will include only commits that contain changes to files within the `subproj1` folder;\n+- Changelog will include only commits that contain changes to files within the `subproj1` directory;\n - Release name gets prefixed with `{{ .ProjectName }} ` if empty;\n-- All build's `dir` setting get set to `monorepo.folder` if empty;\n+- All build's `dir` setting get set to `monorepo.dir` if empty;\n - if yours is not, you might want to change that manually;\n-- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.folder`;\n+- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.dir`;\n - On templates, `{{.PrefixedTag}}` will be `monorepo.prefix/tag` (aka the actual tag name), and `{{.Tag}}` has the prefix stripped;\n \n The rest of the release process should work as usual.\n", "diff --git a/ionic/components/nav/nav-controller.ts b/ionic/components/nav/nav-controller.ts\nindex 8e23c4c..37ac0f4 100644\n--- a/ionic/components/nav/nav-controller.ts\n+++ b/ionic/components/nav/nav-controller.ts\n@@ -527,41 +527,13 @@ export class NavController extends Ion {\n * @private\n * TODO\n */\n- createViewComponentRef(type, hostProtoViewRef, viewContainer, viewCtrlBindings) {\n- let bindings = this.bindings.concat(viewCtrlBindings);\n-\n- // the same guts as DynamicComponentLoader.loadNextToLocation\n- var hostViewRef =\n- viewContainer.createHostView(hostProtoViewRef, viewContainer.length, bindings);\n- var newLocation = this._viewManager.getHostElement(hostViewRef);\n- var component = this._viewManager.getComponent(newLocation);\n-\n- var dispose = () => {\n- var index = viewContainer.indexOf(hostViewRef);\n- if (index !== -1) {\n- viewContainer.remove(index);\n- }\n- };\n-\n- // TODO: make-shift ComponentRef_, this is pretty much going to\n- // break in future versions of ng2, keep an eye on it\n- return {\n- location: newLocation,\n- instance: component,\n- dispose: dispose\n- };\n- }\n-\n- /**\n- * @private\n- * TODO\n- */\n- getBindings(viewCtrl) {\n- // create bindings to this ViewController and its NavParams\n- return this.bindings.concat(Injector.resolve([\n+ loadNextToAnchor(type, location, viewCtrl) {\n+ let bindings = this.bindings.concat(Injector.resolve([\n bind(ViewController).toValue(viewCtrl),\n bind(NavParams).toValue(viewCtrl.params),\n ]));\n+\n+ return this._loader.loadNextToLocation(type, location, bindings);\n }\n \n /**\ndiff --git a/ionic/components/nav/nav.ts b/ionic/components/nav/nav.ts\nindex a98a4ef..063eeb9 100644\n--- a/ionic/components/nav/nav.ts\n+++ b/ionic/components/nav/nav.ts\n@@ -192,65 +192,70 @@ export class Nav extends NavController {\n if (structure.tabs) {\n // the component being loaded is an <ion-tabs>\n // Tabs is essentially a pane, cuz it has its own navbar and content containers\n- let contentContainerRef = 
this._viewManager.getViewContainer(this.anchorElementRef());\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, contentContainerRef, this.getBindings(viewCtrl));\n- viewComponentRef.instance._paneView = true;\n+ this.loadNextToAnchor(componentType, this.anchorElementRef(), viewCtrl).then(componentRef => {\n \n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ componentRef.instance._paneView = true;\n+\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n+\n+ viewCtrl.onReady().then(() => {\n+ done();\n+ });\n \n- viewCtrl.onReady().then(() => {\n- done();\n });\n \n } else {\n // normal ion-view going into pane\n this.getPane(structure, viewCtrl, (pane) => {\n // add the content of the view into the pane's content area\n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, pane.contentContainerRef, this.getBindings(viewCtrl));\n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n+ this.loadNextToAnchor(componentType, pane.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // remove the pane if there are no view items left\n- pane.totalViews--;\n- if (pane.totalViews === 0) {\n- pane.dispose && pane.dispose();\n- }\n- });\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n \n- // count how many ViewControllers are in this pane\n- pane.totalViews++;\n+ // remove the pane if there are no view items left\n+ pane.totalViews--;\n+ if (pane.totalViews === 0) {\n+ pane.dispose && pane.dispose();\n+ }\n+ });\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ // count how many ViewControllers are in this pane\n+ pane.totalViews++;\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = pane.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = pane.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef && navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a 
navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ done();\n+ });\n \n- done();\n });\n }\n }\n@@ -273,7 +278,7 @@ export class Nav extends NavController {\n \n } else {\n // create a new nav pane\n- this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.getBindings(viewCtrl)).then(componentRef => {\n+ this._loader.loadNextToLocation(Pane, this.anchorElementRef(), this.bindings).then(componentRef => {\n \n // get the pane reference\n pane = this.newPane;\n@@ -354,17 +359,6 @@ export class Nav extends NavController {\n \n /**\n * @private\n- * TODO\n- * @param {TODO} elementBinder TODO\n- * @param {TODO} id TODO\n- * @return {TODO} TODO\n- */\n-function isComponent(elementBinder, id) {\n- return (elementBinder && elementBinder.componentDirective && elementBinder.componentDirective.metadata.id == id);\n-}\n-\n-/**\n- * @private\n */\n @Directive({selector: 'template[pane-anchor]'})\n class NavPaneAnchor {\n@@ -393,9 +387,9 @@ class NavBarAnchor {\n class ContentAnchor {\n constructor(\n @Host() @Inject(forwardRef(() => Pane)) pane: Pane,\n- viewContainerRef: ViewContainerRef\n+ elementRef: ElementRef\n ) {\n- pane.contentContainerRef = viewContainerRef;\n+ pane.contentAnchorRef = elementRef;\n }\n }\n \ndiff --git a/ionic/components/tabs/tab.ts b/ionic/components/tabs/tab.ts\nindex aa21cad..af5d190 100644\n--- a/ionic/components/tabs/tab.ts\n+++ b/ionic/components/tabs/tab.ts\n@@ -153,40 +153,44 @@ export class Tab extends NavController {\n \n loadContainer(componentType, hostProtoViewRef, viewCtrl, done) {\n \n- let viewComponentRef = this.createViewComponentRef(componentType, hostProtoViewRef, this.contentContainerRef, this.getBindings(viewCtrl));\n- viewCtrl.disposals.push(() => {\n- viewComponentRef.dispose();\n- });\n+ this.loadNextToAnchor(componentType, this.contentAnchorRef, viewCtrl).then(componentRef => {\n \n- // a new ComponentRef has been created\n- // set the ComponentRef's instance to this ViewController\n- viewCtrl.setInstance(viewComponentRef.instance);\n+ viewCtrl.disposals.push(() => {\n+ componentRef.dispose();\n+ });\n \n- // remember the ElementRef to the content that was just created\n- viewCtrl.viewElementRef(viewComponentRef.location);\n+ // a new ComponentRef has been created\n+ // set the ComponentRef's instance to this ViewController\n+ viewCtrl.setInstance(componentRef.instance);\n \n- // get the NavController's container for navbars, which is\n- // the place this NavController will add each ViewController's navbar\n- let navbarContainerRef = this.tabs.navbarContainerRef;\n+ // remember the ElementRef to the content that was just created\n+ viewCtrl.viewElementRef(componentRef.location);\n \n- // get this ViewController's navbar TemplateRef, which may not\n- // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n- let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n+ // get the NavController's container for navbars, which is\n+ // the place this NavController will add each ViewController's navbar\n+ let navbarContainerRef = this.tabs.navbarContainerRef;\n \n- // create the navbar view if the pane has a navbar container, and the\n- // ViewController's instance has a navbar TemplateRef to go to inside of it\n- if (navbarContainerRef 
&& navbarTemplateRef) {\n- let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+ // get this ViewController's navbar TemplateRef, which may not\n+ // exist if the ViewController's template didn't have an <ion-navbar *navbar>\n+ let navbarTemplateRef = viewCtrl.getNavbarTemplateRef();\n \n- viewCtrl.disposals.push(() => {\n- let index = navbarContainerRef.indexOf(navbarView);\n- if (index > -1) {\n- navbarContainerRef.remove(index);\n- }\n- });\n- }\n+ // create the navbar view if the pane has a navbar container, and the\n+ // ViewController's instance has a navbar TemplateRef to go to inside of it\n+ if (navbarContainerRef && navbarTemplateRef) {\n+ let navbarView = navbarContainerRef.createEmbeddedView(navbarTemplateRef, -1);\n+\n+ viewCtrl.disposals.push(() => {\n+ let index = navbarContainerRef.indexOf(navbarView);\n+ if (index > -1) {\n+ navbarContainerRef.remove(index);\n+ }\n+ });\n+ }\n+\n+ done();\n+\n+ });\n \n- done();\n }\n \n }\n@@ -194,10 +198,7 @@ export class Tab extends NavController {\n \n @Directive({selector: 'template[content-anchor]'})\n class TabContentAnchor {\n- constructor(\n- @Host() tab: Tab,\n- viewContainerRef: ViewContainerRef\n- ) {\n- tab.contentContainerRef = viewContainerRef;\n+ constructor(@Host() tab: Tab, elementRef: ElementRef) {\n+ tab.contentAnchorRef = elementRef;\n }\n }\n", "diff --git a/client/src/components/Profile/__test__/EducationCard.test.tsx b/client/src/components/Profile/__test__/EducationCard.test.tsx\nindex 44b6e00..14539dd 100644\n--- a/client/src/components/Profile/__test__/EducationCard.test.tsx\n+++ b/client/src/components/Profile/__test__/EducationCard.test.tsx\n@@ -53,7 +53,7 @@ describe('EducationCard', () => {\n });\n \n describe('filterPermissions', () => {\n- it('should left only contacts in \"permissionsSettings\" object', () => {\n+ it('should left only \"isEducationVisible\" in \"permissionsSettings\" object', () => {\n const permissionsSettings = {\n isProfileVisible: { all: true },\n isAboutVisible: { all: true, mentor: true, student: true },\ndiff --git a/client/src/components/Profile/__test__/MainCard.test.tsx b/client/src/components/Profile/__test__/MainCard.test.tsx\nindex 8fb2840..552804b 100644\n--- a/client/src/components/Profile/__test__/MainCard.test.tsx\n+++ b/client/src/components/Profile/__test__/MainCard.test.tsx\n@@ -3,6 +3,8 @@ import { shallow } from 'enzyme';\n import { shallowToJson } from 'enzyme-to-json';\n import MainCard from '../MainCard';\n \n+// TODO: Known Issue: https://stackoverflow.com/questions/59942808/how-can-i-use-jest-coverage-in-next-js-styled-jsx\n+\n describe('MainCard', () => {\n describe('Should render correctly', () => {\n it('if is editing mode disabled', () => {\n@@ -21,49 +23,89 @@ describe('MainCard', () => {\n );\n expect(shallowToJson(output)).toMatchSnapshot();\n });\n+ it('if is editing mode enabled', () => {\n+ const output = shallow(\n+ <MainCard\n+ data={{\n+ name: 'Petr Pervyi',\n+ githubId: 'piter',\n+ locationName: 'SPB',\n+ locationId: '1',\n+ }}\n+ isEditingModeEnabled={true}\n+ onPermissionsSettingsChange={() => {}}\n+ onProfileSettingsChange={() => {}}\n+ />,\n+ );\n+ expect(shallowToJson(output)).toMatchSnapshot();\n+ });\n });\n \n- // const wrapper = shallow(\n- // <MainCard\n- // data={{\n- // name: 'Petr Pervyi',\n- // githubId: 'piter',\n- // locationName: 'SPB',\n- // locationId: '1',\n- // }}\n- // isEditingModeEnabled={false}\n- // onPermissionsSettingsChange={() => {}}\n- // onProfileSettingsChange={() => {}}\n- // />);\n- // 
const instance = wrapper.instance();\n- // describe('showVisibilitySettings', () => {\n- // it('should set \"state.isVisibilitySettingsVisible\" as \"true\"', () => {\n- // expect(instance.state.isVisibilitySettingsVisible).toBe(false);\n- // instance.showVisibilitySettings();\n- // expect(instance.state.isVisibilitySettingsVisible).toBe(true);\n- // });\n- // });\n- // describe('hideVisibilitySettings', () => {\n- // it('should set \"state.isVisibilitySettingsVisible\" as \"false\"', () => {\n- // instance.state.isVisibilitySettingsVisible = true;\n- // expect(instance.state.isVisibilitySettingsVisible).toBe(true);\n- // instance.hideVisibilitySettings();\n- // expect(instance.state.isVisibilitySettingsVisible).toBe(false);\n- // });\n- // });\n- // describe('showProfileSettings', () => {\n- // it('should set \"state.isProfileSettingsVisible\" as \"true\"', () => {\n- // expect(instance.state.isProfileSettingsVisible).toBe(false);\n- // instance.showProfileSettings();\n- // expect(instance.state.isProfileSettingsVisible).toBe(true);\n- // });\n- // });\n- // describe('hideProfileSettings', () => {\n- // it('should set \"state.isProfileSettingsVisible\" as \"false\"', () => {\n- // instance.state.isProfileSettingsVisible = true;\n- // expect(instance.state.isProfileSettingsVisible).toBe(true);\n- // instance.hideProfileSettings();\n- // expect(instance.state.isProfileSettingsVisible).toBe(false);\n- // });\n- // });\n+ const wrapper = shallow(\n+ <MainCard\n+ data={{\n+ name: 'Petr Pervyi',\n+ githubId: 'piter',\n+ locationName: 'SPB',\n+ locationId: '1',\n+ }}\n+ isEditingModeEnabled={false}\n+ onPermissionsSettingsChange={() => {}}\n+ onProfileSettingsChange={() => {}}\n+ />);\n+ const instance = wrapper.instance();\n+ describe('showVisibilitySettings', () => {\n+ it('should set \"state.isVisibilitySettingsVisible\" as \"true\"', () => {\n+ expect(instance.state.isVisibilitySettingsVisible).toBe(false);\n+ instance.showVisibilitySettings();\n+ expect(instance.state.isVisibilitySettingsVisible).toBe(true);\n+ });\n+ });\n+ describe('hideVisibilitySettings', () => {\n+ it('should set \"state.isVisibilitySettingsVisible\" as \"false\"', () => {\n+ instance.state.isVisibilitySettingsVisible = true;\n+ expect(instance.state.isVisibilitySettingsVisible).toBe(true);\n+ instance.hideVisibilitySettings();\n+ expect(instance.state.isVisibilitySettingsVisible).toBe(false);\n+ });\n+ });\n+ describe('showProfileSettings', () => {\n+ it('should set \"state.isProfileSettingsVisible\" as \"true\"', () => {\n+ expect(instance.state.isProfileSettingsVisible).toBe(false);\n+ instance.showProfileSettings();\n+ expect(instance.state.isProfileSettingsVisible).toBe(true);\n+ });\n+ });\n+ describe('hideProfileSettings', () => {\n+ it('should set \"state.isProfileSettingsVisible\" as \"false\"', () => {\n+ instance.state.isProfileSettingsVisible = true;\n+ expect(instance.state.isProfileSettingsVisible).toBe(true);\n+ instance.hideProfileSettings();\n+ expect(instance.state.isProfileSettingsVisible).toBe(false);\n+ });\n+ });\n+ describe('filterPermissions', () => {\n+ it('should left only \"isProfileVisible\" in \"permissionsSettings\" object', () => {\n+ const permissionsSettings = {\n+ isProfileVisible: { all: true },\n+ isAboutVisible: { all: true, mentor: true, student: true },\n+ isEducationVisible: { all: true, mentor: true, student: true },\n+ isEnglishVisible: { all: false, student: false },\n+ isEmailVisible: { all: true, student: true },\n+ isTelegramVisible: { all: false, student: false },\n+ 
isSkypeVisible: { all: true, student: true },\n+ isPhoneVisible: { all: false, student: false },\n+ isContactsNotesVisible: { all: true, student: true },\n+ isLinkedInVisible: { all: false, mentor: false, student: false },\n+ isPublicFeedbackVisible: { all: true, mentor: true, student: true },\n+ isMentorStatsVisible: { all: true, mentor: true, student: true },\n+ isStudentStatsVisible: { all: true, student: true },\n+ };\n+ const instance = wrapper.instance();\n+ const result = instance.filterPermissions(permissionsSettings);\n+ expect(result).toEqual({\n+ isProfileVisible: { all: true },\n+ });\n+ });\n+ });\n });\ndiff --git a/client/src/components/Profile/__test__/__snapshots__/MainCard.test.tsx.snap b/client/src/components/Profile/__test__/__snapshots__/MainCard.test.tsx.snap\nindex 40331eb..fef20dd 100644\n--- a/client/src/components/Profile/__test__/__snapshots__/MainCard.test.tsx.snap\n+++ b/client/src/components/Profile/__test__/__snapshots__/MainCard.test.tsx.snap\n@@ -71,3 +71,158 @@ exports[`MainCard Should render correctly if is editing mode disabled 1`] = `\n </Card>\n </Fragment>\n `;\n+\n+exports[`MainCard Should render correctly if is editing mode enabled 1`] = `\n+<Fragment>\n+ <Card\n+ actions={\n+ Array [\n+ <ForwardRef(EditOutlined)\n+ onClick={[Function]}\n+ />,\n+ <ForwardRef(SettingOutlined)\n+ onClick={[Function]}\n+ />,\n+ ]\n+ }\n+ >\n+ <GithubAvatar\n+ githubId=\"piter\"\n+ size={96}\n+ style={\n+ Object {\n+ \"display\": \"block\",\n+ \"margin\": \"0 auto 10px\",\n+ }\n+ }\n+ />\n+ <Title\n+ level={1}\n+ style={\n+ Object {\n+ \"fontSize\": 24,\n+ \"margin\": 0,\n+ \"textAlign\": \"center\",\n+ }\n+ }\n+ >\n+ Petr Pervyi\n+ </Title>\n+ <Paragraph\n+ style={\n+ Object {\n+ \"marginBottom\": 20,\n+ \"textAlign\": \"center\",\n+ }\n+ }\n+ >\n+ <a\n+ href=\"https://github.com/piter\"\n+ style={\n+ Object {\n+ \"fontSize\": 16,\n+ \"marginLeft\": \"-14px\",\n+ }\n+ }\n+ target=\"_blank\"\n+ >\n+ <ForwardRef(GithubFilled) />\n+ \n+ piter\n+ </a>\n+ </Paragraph>\n+ <Paragraph\n+ style={\n+ Object {\n+ \"margin\": 0,\n+ \"textAlign\": \"center\",\n+ }\n+ }\n+ >\n+ <span\n+ style={\n+ Object {\n+ \"marginLeft\": \"-14px\",\n+ }\n+ }\n+ >\n+ <ForwardRef(EnvironmentFilled) />\n+ \n+ SPB\n+ </span>\n+ </Paragraph>\n+ <PermissionsSettingsDrawer\n+ hideSettings={[Function]}\n+ isSettingsVisible={false}\n+ onPermissionsSettingsChange={[Function]}\n+ />\n+ <ProfileSettingsDrawer\n+ content={\n+ <div>\n+ <p\n+ style={\n+ Object {\n+ \"fontSize\": 18,\n+ \"marginBottom\": 5,\n+ }\n+ }\n+ >\n+ <Text\n+ strong={true}\n+ >\n+ Name:\n+ </Text>\n+ </p>\n+ <p\n+ style={\n+ Object {\n+ \"marginBottom\": 20,\n+ }\n+ }\n+ >\n+ <Input\n+ onChange={[Function]}\n+ placeholder=\"Firstname Lastname\"\n+ type=\"text\"\n+ value=\"Petr Pervyi\"\n+ />\n+ </p>\n+ <p\n+ style={\n+ Object {\n+ \"fontSize\": 18,\n+ \"marginBottom\": 5,\n+ }\n+ }\n+ >\n+ <Text\n+ strong={true}\n+ >\n+ Location:\n+ </Text>\n+ </p>\n+ <div\n+ style={\n+ Object {\n+ \"marginBottom\": 5,\n+ }\n+ }\n+ >\n+ <LocationSelect\n+ defaultValue=\"1\"\n+ onChange={[Function]}\n+ style={\n+ Object {\n+ \"width\": \"100%\",\n+ }\n+ }\n+ />\n+ </div>\n+ </div>\n+ }\n+ hideSettings={[Function]}\n+ isSettingsVisible={false}\n+ />\n+ </Card>\n+</Fragment>\n+`;\ndiff --git a/client/src/jest.config.js b/client/src/jest.config.js\nindex df39788..654f9f3 100644\n--- a/client/src/jest.config.js\n+++ b/client/src/jest.config.js\n@@ -7,4 +7,5 @@ module.exports = {\n '^services(.*)$': '<rootDir>/services/$1',\n '^utils(.*)$': 
'<rootDir>/utils/$1',\n },\n+ verbose: true,\n };\n", "diff --git a/atom/browser/atom_browser_client.cc b/atom/browser/atom_browser_client.cc\nindex 97e5f26..df0774b 100644\n--- a/atom/browser/atom_browser_client.cc\n+++ b/atom/browser/atom_browser_client.cc\n@@ -611,7 +611,7 @@ void OnOpenExternal(const GURL& escaped_url, bool allowed) {\n #else\n escaped_url,\n #endif\n- true);\n+ platform_util::OpenExternalOptions());\n }\n \n void HandleExternalProtocolInUI(\ndiff --git a/atom/common/api/atom_api_shell.cc b/atom/common/api/atom_api_shell.cc\nindex 1323cd6..7c67c7a 100644\n--- a/atom/common/api/atom_api_shell.cc\n+++ b/atom/common/api/atom_api_shell.cc\n@@ -60,11 +60,12 @@ bool OpenExternal(\n const GURL& url,\n #endif\n mate::Arguments* args) {\n- bool activate = true;\n+ platform_util::OpenExternalOptions options;\n if (args->Length() >= 2) {\n- mate::Dictionary options;\n- if (args->GetNext(&options)) {\n- options.Get(\"activate\", &activate);\n+ mate::Dictionary obj;\n+ if (args->GetNext(&obj)) {\n+ obj.Get(\"activate\", &options.activate);\n+ obj.Get(\"workingDirectory\", &options.working_dir);\n }\n }\n \n@@ -72,13 +73,13 @@ bool OpenExternal(\n base::Callback<void(v8::Local<v8::Value>)> callback;\n if (args->GetNext(&callback)) {\n platform_util::OpenExternal(\n- url, activate,\n+ url, options,\n base::Bind(&OnOpenExternalFinished, args->isolate(), callback));\n return true;\n }\n }\n \n- return platform_util::OpenExternal(url, activate);\n+ return platform_util::OpenExternal(url, options);\n }\n \n #if defined(OS_WIN)\ndiff --git a/atom/common/platform_util.h b/atom/common/platform_util.h\nindex 6fd8405..6686a4f 100644\n--- a/atom/common/platform_util.h\n+++ b/atom/common/platform_util.h\n@@ -8,6 +8,7 @@\n #include <string>\n \n #include \"base/callback_forward.h\"\n+#include \"base/files/file_path.h\"\n #include \"build/build_config.h\"\n \n #if defined(OS_WIN)\n@@ -16,10 +17,6 @@\n \n class GURL;\n \n-namespace base {\n-class FilePath;\n-}\n-\n namespace platform_util {\n \n typedef base::Callback<void(const std::string&)> OpenExternalCallback;\n@@ -32,6 +29,11 @@ bool ShowItemInFolder(const base::FilePath& full_path);\n // Must be called from the UI thread.\n bool OpenItem(const base::FilePath& full_path);\n \n+struct OpenExternalOptions {\n+ bool activate = true;\n+ base::FilePath working_dir;\n+};\n+\n // Open the given external protocol URL in the desktop's default manner.\n // (For example, mailto: URLs in the default mail user agent.)\n bool OpenExternal(\n@@ -40,7 +42,7 @@ bool OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate);\n+ const OpenExternalOptions& options);\n \n // The asynchronous version of OpenExternal.\n void OpenExternal(\n@@ -49,7 +51,7 @@ void OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback);\n \n // Move a file to trash.\ndiff --git a/atom/common/platform_util_linux.cc b/atom/common/platform_util_linux.cc\nindex 63ee0bd..f17cbda 100644\n--- a/atom/common/platform_util_linux.cc\n+++ b/atom/common/platform_util_linux.cc\n@@ -80,7 +80,7 @@ bool OpenItem(const base::FilePath& full_path) {\n return XDGOpen(full_path.value(), false);\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n // Don't wait for exit, since we don't want to wait for the browser/email\n // client window to close before returning\n if (url.SchemeIs(\"mailto\"))\n@@ -90,10 +90,10 @@ bool 
OpenExternal(const GURL& url, bool activate) {\n }\n \n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? \"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? \"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_mac.mm b/atom/common/platform_util_mac.mm\nindex b83b1e1..4cda8bf 100644\n--- a/atom/common/platform_util_mac.mm\n+++ b/atom/common/platform_util_mac.mm\n@@ -139,16 +139,16 @@ bool OpenItem(const base::FilePath& full_path) {\n launchIdentifiers:NULL];\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n DCHECK([NSThread isMainThread]);\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (ns_url)\n- return OpenURL(ns_url, activate).empty();\n+ return OpenURL(ns_url, options.activate).empty();\n return false;\n }\n \n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (!ns_url) {\n@@ -157,13 +157,13 @@ void OpenExternal(const GURL& url,\n }\n \n __block OpenExternalCallback c = callback;\n- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),\n- ^{\n- __block std::string error = OpenURL(ns_url, activate);\n- dispatch_async(dispatch_get_main_queue(), ^{\n- c.Run(error);\n- });\n- });\n+ dispatch_async(\n+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n+ __block std::string error = OpenURL(ns_url, options.activate);\n+ dispatch_async(dispatch_get_main_queue(), ^{\n+ c.Run(error);\n+ });\n+ });\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_win.cc b/atom/common/platform_util_win.cc\nindex 34576be..5712200 100644\n--- a/atom/common/platform_util_win.cc\n+++ b/atom/common/platform_util_win.cc\n@@ -294,15 +294,18 @@ bool OpenItem(const base::FilePath& full_path) {\n return ui::win::OpenFileViaShell(full_path);\n }\n \n-bool OpenExternal(const base::string16& url, bool activate) {\n+bool OpenExternal(const base::string16& url,\n+ const OpenExternalOptions& options) {\n // Quote the input scheme to be sure that the command does not have\n // parameters unexpected by the external program. This url should already\n // have been escaped.\n base::string16 escaped_url = L\"\\\"\" + url + L\"\\\"\";\n+ auto working_dir = options.working_dir.value();\n \n- if (reinterpret_cast<ULONG_PTR>(ShellExecuteW(\n- NULL, L\"open\", escaped_url.c_str(), NULL, NULL, SW_SHOWNORMAL)) <=\n- 32) {\n+ if (reinterpret_cast<ULONG_PTR>(\n+ ShellExecuteW(nullptr, L\"open\", escaped_url.c_str(), nullptr,\n+ working_dir.empty() ? nullptr : working_dir.c_str(),\n+ SW_SHOWNORMAL)) <= 32) {\n // We fail to execute the call. We could display a message to the user.\n // TODO(nsylvain): we should also add a dialog to warn on errors. See\n // bug 1136923.\n@@ -312,10 +315,10 @@ bool OpenExternal(const base::string16& url, bool activate) {\n }\n \n void OpenExternal(const base::string16& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? 
\"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? \"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& path) {\ndiff --git a/docs/api/shell.md b/docs/api/shell.md\nindex a469f94..b38348a 100644\n--- a/docs/api/shell.md\n+++ b/docs/api/shell.md\n@@ -37,9 +37,10 @@ Open the given file in the desktop's default manner.\n ### `shell.openExternal(url[, options, callback])`\n \n * `url` String - Max 2081 characters on windows, or the function returns false.\n-* `options` Object (optional) _macOS_\n- * `activate` Boolean - `true` to bring the opened application to the\n- foreground. The default is `true`.\n+* `options` Object (optional)\n+ * `activate` Boolean (optional) - `true` to bring the opened application to the\n+ foreground. The default is `true`. _macOS_\n+ * `workingDirectory` String (optional) - The working directory. _Windows_\n * `callback` Function (optional) _macOS_ - If specified will perform the open asynchronously.\n * `error` Error\n \n", "diff --git a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\nindex 69b06b6..a4fcb77 100644\n--- a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\n+++ b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\n@@ -112,7 +112,7 @@ public class PartitionRestoreService {\n SegmentedJournal.builder()\n .withDirectory(dataDirectory.toFile())\n .withName(partition.name())\n- .withLastWrittenIndex(-1)\n+ .withLastFlushedIndex(-1)\n .build()) {\n \n resetJournal(checkpointPosition, journal);\n"]
5
["9ed3c0c4a72af977fc9150512fb6538f20a94b22", "085ee958c48d695ba50822d8767d615fd9e887fa", "f87659953e9af59bc7cb314a22dd076d988ef607", "a9475f359061fcd6cd53557599fedf0df5e9ee00", "5ffc5794808647de14f945141692be26ad143006"]
["docs", "refactor", "test", "feat", "fix"]
ecma 7 ready,disable getGPUInfo() tests on Linux (#14875),fixed start types for size and opacity,add workingDirectory option to shell.openExternal() (#15065) Allows passing `workingDirectory` to the underlying `ShellExecuteW` API on Windows. The motivation is that by default `ShellExecute` would use the current working directory, which would get locked on Windows and can prevent autoUpdater from working correctly. We need to be able to specify a different `workingDirectory` to prevent this situation.,rename step
["diff --git a/config/webpack.config.prod.js b/config/webpack.config.prod.js\nindex f7c6b23..4a00c65 100644\n--- a/config/webpack.config.prod.js\n+++ b/config/webpack.config.prod.js\n@@ -266,7 +266,7 @@ module.exports = {\n : new UglifyJsPlugin({\n uglifyOptions: {\n ie8: false,\n- ecma: 6,\n+ ecma: 7,\n compress: {\n warnings: false,\n // Disabled because of an issue with Uglify breaking seemingly valid code:\n", "diff --git a/spec/api-app-spec.js b/spec/api-app-spec.js\nindex 4ca1fa3..6ab6bd0 100644\n--- a/spec/api-app-spec.js\n+++ b/spec/api-app-spec.js\n@@ -805,6 +805,14 @@ describe('app module', () => {\n })\n \n describe('getGPUInfo() API', () => {\n+ before(function () {\n+ // TODO(alexeykuzmoin): Fails on linux. Enable them back.\n+ // https://github.com/electron/electron/pull/14863\n+ if (process.platform === 'linux') {\n+ this.skip()\n+ }\n+ })\n+\n it('succeeds with basic GPUInfo', (done) => {\n app.getGPUInfo('basic').then((gpuInfo) => {\n // Devices information is always present in the available info\n", "diff --git a/core/main/src/Core/Particle.ts b/core/main/src/Core/Particle.ts\nindex 1aa6fba..6ea6ffc 100644\n--- a/core/main/src/Core/Particle.ts\n+++ b/core/main/src/Core/Particle.ts\n@@ -271,7 +271,7 @@ export class Particle implements IParticle {\n }\n }\n \n- const sizeAnimation = this.options.size.animation;\n+ const sizeAnimation = sizeOptions.animation;\n \n if (sizeAnimation.enable) {\n this.size.status = AnimationStatus.increasing;\n@@ -279,7 +279,8 @@ export class Particle implements IParticle {\n if (!randomSize) {\n switch (sizeAnimation.startValue) {\n case StartValueType.min:\n- this.size.value = sizeAnimation.minimumValue * pxRatio;\n+ this.size.value = NumberUtils.getRangeMin(sizeOptions.value) * pxRatio;\n+ this.size.status = AnimationStatus.increasing;\n \n break;\n \n@@ -287,11 +288,14 @@ export class Particle implements IParticle {\n this.size.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(sizeAnimation.minimumValue * pxRatio, this.size.value)\n );\n+ this.size.status =\n+ Math.random() >= 0.5 ? AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.size.value = NumberUtils.getRangeMax(sizeOptions.value) * pxRatio;\n this.size.status = AnimationStatus.decreasing;\n \n break;\n@@ -393,7 +397,8 @@ export class Particle implements IParticle {\n if (!randomOpacity) {\n switch (opacityAnimation.startValue) {\n case StartValueType.min:\n- this.opacity.value = opacityAnimation.minimumValue;\n+ this.opacity.value = NumberUtils.getRangeMin(this.opacity.value);\n+ this.opacity.status = AnimationStatus.increasing;\n \n break;\n \n@@ -401,11 +406,14 @@ export class Particle implements IParticle {\n this.opacity.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(opacityAnimation.minimumValue, this.opacity.value)\n );\n+ this.opacity.status =\n+ Math.random() >= 0.5 ? 
AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.opacity.value = NumberUtils.getRangeMax(this.opacity.value);\n this.opacity.status = AnimationStatus.decreasing;\n \n break;\ndiff --git a/presets/confetti/src/options.ts b/presets/confetti/src/options.ts\nindex 7fc6225..a713425 100644\n--- a/presets/confetti/src/options.ts\n+++ b/presets/confetti/src/options.ts\n@@ -28,7 +28,7 @@ export const loadOptions = (confettiOptions: RecursivePartial<IConfettiOptions>)\n animation: {\n enable: true,\n minimumValue: 0,\n- speed: 2,\n+ speed: 0.5,\n startValue: \"max\",\n destroy: \"min\",\n },\n", "diff --git a/atom/browser/atom_browser_client.cc b/atom/browser/atom_browser_client.cc\nindex 97e5f26..df0774b 100644\n--- a/atom/browser/atom_browser_client.cc\n+++ b/atom/browser/atom_browser_client.cc\n@@ -611,7 +611,7 @@ void OnOpenExternal(const GURL& escaped_url, bool allowed) {\n #else\n escaped_url,\n #endif\n- true);\n+ platform_util::OpenExternalOptions());\n }\n \n void HandleExternalProtocolInUI(\ndiff --git a/atom/common/api/atom_api_shell.cc b/atom/common/api/atom_api_shell.cc\nindex 1323cd6..7c67c7a 100644\n--- a/atom/common/api/atom_api_shell.cc\n+++ b/atom/common/api/atom_api_shell.cc\n@@ -60,11 +60,12 @@ bool OpenExternal(\n const GURL& url,\n #endif\n mate::Arguments* args) {\n- bool activate = true;\n+ platform_util::OpenExternalOptions options;\n if (args->Length() >= 2) {\n- mate::Dictionary options;\n- if (args->GetNext(&options)) {\n- options.Get(\"activate\", &activate);\n+ mate::Dictionary obj;\n+ if (args->GetNext(&obj)) {\n+ obj.Get(\"activate\", &options.activate);\n+ obj.Get(\"workingDirectory\", &options.working_dir);\n }\n }\n \n@@ -72,13 +73,13 @@ bool OpenExternal(\n base::Callback<void(v8::Local<v8::Value>)> callback;\n if (args->GetNext(&callback)) {\n platform_util::OpenExternal(\n- url, activate,\n+ url, options,\n base::Bind(&OnOpenExternalFinished, args->isolate(), callback));\n return true;\n }\n }\n \n- return platform_util::OpenExternal(url, activate);\n+ return platform_util::OpenExternal(url, options);\n }\n \n #if defined(OS_WIN)\ndiff --git a/atom/common/platform_util.h b/atom/common/platform_util.h\nindex 6fd8405..6686a4f 100644\n--- a/atom/common/platform_util.h\n+++ b/atom/common/platform_util.h\n@@ -8,6 +8,7 @@\n #include <string>\n \n #include \"base/callback_forward.h\"\n+#include \"base/files/file_path.h\"\n #include \"build/build_config.h\"\n \n #if defined(OS_WIN)\n@@ -16,10 +17,6 @@\n \n class GURL;\n \n-namespace base {\n-class FilePath;\n-}\n-\n namespace platform_util {\n \n typedef base::Callback<void(const std::string&)> OpenExternalCallback;\n@@ -32,6 +29,11 @@ bool ShowItemInFolder(const base::FilePath& full_path);\n // Must be called from the UI thread.\n bool OpenItem(const base::FilePath& full_path);\n \n+struct OpenExternalOptions {\n+ bool activate = true;\n+ base::FilePath working_dir;\n+};\n+\n // Open the given external protocol URL in the desktop's default manner.\n // (For example, mailto: URLs in the default mail user agent.)\n bool OpenExternal(\n@@ -40,7 +42,7 @@ bool OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate);\n+ const OpenExternalOptions& options);\n \n // The asynchronous version of OpenExternal.\n void OpenExternal(\n@@ -49,7 +51,7 @@ void OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback);\n \n // Move a file to trash.\ndiff --git 
a/atom/common/platform_util_linux.cc b/atom/common/platform_util_linux.cc\nindex 63ee0bd..f17cbda 100644\n--- a/atom/common/platform_util_linux.cc\n+++ b/atom/common/platform_util_linux.cc\n@@ -80,7 +80,7 @@ bool OpenItem(const base::FilePath& full_path) {\n return XDGOpen(full_path.value(), false);\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n // Don't wait for exit, since we don't want to wait for the browser/email\n // client window to close before returning\n if (url.SchemeIs(\"mailto\"))\n@@ -90,10 +90,10 @@ bool OpenExternal(const GURL& url, bool activate) {\n }\n \n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? \"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? \"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_mac.mm b/atom/common/platform_util_mac.mm\nindex b83b1e1..4cda8bf 100644\n--- a/atom/common/platform_util_mac.mm\n+++ b/atom/common/platform_util_mac.mm\n@@ -139,16 +139,16 @@ bool OpenItem(const base::FilePath& full_path) {\n launchIdentifiers:NULL];\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n DCHECK([NSThread isMainThread]);\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (ns_url)\n- return OpenURL(ns_url, activate).empty();\n+ return OpenURL(ns_url, options.activate).empty();\n return false;\n }\n \n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (!ns_url) {\n@@ -157,13 +157,13 @@ void OpenExternal(const GURL& url,\n }\n \n __block OpenExternalCallback c = callback;\n- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),\n- ^{\n- __block std::string error = OpenURL(ns_url, activate);\n- dispatch_async(dispatch_get_main_queue(), ^{\n- c.Run(error);\n- });\n- });\n+ dispatch_async(\n+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n+ __block std::string error = OpenURL(ns_url, options.activate);\n+ dispatch_async(dispatch_get_main_queue(), ^{\n+ c.Run(error);\n+ });\n+ });\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_win.cc b/atom/common/platform_util_win.cc\nindex 34576be..5712200 100644\n--- a/atom/common/platform_util_win.cc\n+++ b/atom/common/platform_util_win.cc\n@@ -294,15 +294,18 @@ bool OpenItem(const base::FilePath& full_path) {\n return ui::win::OpenFileViaShell(full_path);\n }\n \n-bool OpenExternal(const base::string16& url, bool activate) {\n+bool OpenExternal(const base::string16& url,\n+ const OpenExternalOptions& options) {\n // Quote the input scheme to be sure that the command does not have\n // parameters unexpected by the external program. 
This url should already\n // have been escaped.\n base::string16 escaped_url = L\"\\\"\" + url + L\"\\\"\";\n+ auto working_dir = options.working_dir.value();\n \n- if (reinterpret_cast<ULONG_PTR>(ShellExecuteW(\n- NULL, L\"open\", escaped_url.c_str(), NULL, NULL, SW_SHOWNORMAL)) <=\n- 32) {\n+ if (reinterpret_cast<ULONG_PTR>(\n+ ShellExecuteW(nullptr, L\"open\", escaped_url.c_str(), nullptr,\n+ working_dir.empty() ? nullptr : working_dir.c_str(),\n+ SW_SHOWNORMAL)) <= 32) {\n // We fail to execute the call. We could display a message to the user.\n // TODO(nsylvain): we should also add a dialog to warn on errors. See\n // bug 1136923.\n@@ -312,10 +315,10 @@ bool OpenExternal(const base::string16& url, bool activate) {\n }\n \n void OpenExternal(const base::string16& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? \"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? \"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& path) {\ndiff --git a/docs/api/shell.md b/docs/api/shell.md\nindex a469f94..b38348a 100644\n--- a/docs/api/shell.md\n+++ b/docs/api/shell.md\n@@ -37,9 +37,10 @@ Open the given file in the desktop's default manner.\n ### `shell.openExternal(url[, options, callback])`\n \n * `url` String - Max 2081 characters on windows, or the function returns false.\n-* `options` Object (optional) _macOS_\n- * `activate` Boolean - `true` to bring the opened application to the\n- foreground. The default is `true`.\n+* `options` Object (optional)\n+ * `activate` Boolean (optional) - `true` to bring the opened application to the\n+ foreground. The default is `true`. _macOS_\n+ * `workingDirectory` String (optional) - The working directory. _Windows_\n * `callback` Function (optional) _macOS_ - If specified will perform the open asynchronously.\n * `error` Error\n \n", "diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex e81d897..5c3ee6b 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -45,7 +45,7 @@ jobs:\n - name: Install dependencies\n run: pnpm install\n \n- - name: Typecheck\n+ - name: Build (stub)\n run: pnpm build:stub\n \n - name: Typecheck\n"]
5
["6aa63c9b8d4dcdbb401743adc3c9a1020d943250", "60ac03c08f942a8dda49b9f9f7d2ce7a63535414", "06960183db42cba1b1f1a8077660ba8c801c9e18", "a9475f359061fcd6cd53557599fedf0df5e9ee00", "34875bc0e59b43d9041903101c823d25ec194a21"]
["build", "test", "fix", "feat", "ci"]
new ShowDebug parameter calculate each segment timing new parameter to show/hide segment debug information set-poshprompt updated with the new showDebug parameter Force disabled segment to be visible for debug purpose,add test for clickhouse-specific `create_table` parameters,abort parallel stages if one failed,remove sync ts check,use module path alias
["diff --git a/engine.go b/engine.go\nindex 6cc1ff3..4617ceb 100644\n--- a/engine.go\n+++ b/engine.go\n@@ -67,6 +67,9 @@ func (e *engine) renderText(text string) {\n \tprefix := e.activeSegment.getValue(Prefix, \" \")\n \tpostfix := e.activeSegment.getValue(Postfix, \" \")\n \te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"%s%s%s\", prefix, text, postfix))\n+\tif *e.env.getArgs().Debug {\n+\t\te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"(%s:%s)\", e.activeSegment.Type, e.activeSegment.timing))\n+\t}\n }\n \n func (e *engine) renderSegmentText(text string) {\n@@ -107,13 +110,11 @@ func (e *engine) setStringValues(segments []*Segment) {\n \twg.Add(len(segments))\n \tdefer wg.Wait()\n \tcwd := e.env.getcwd()\n+\tdebug := *e.env.getArgs().Debug\n \tfor _, segment := range segments {\n \t\tgo func(s *Segment) {\n \t\t\tdefer wg.Done()\n-\t\t\terr := s.mapSegmentWithWriter(e.env)\n-\t\t\tif err == nil && !s.hasValue(IgnoreFolders, cwd) && s.enabled() {\n-\t\t\t\ts.stringValue = s.string()\n-\t\t\t}\n+\t\t\ts.setStringValue(e.env, cwd, debug)\n \t\t}(segment)\n \t}\n }\ndiff --git a/main.go b/main.go\nindex 56ae8a5..d67a640 100644\n--- a/main.go\n+++ b/main.go\n@@ -14,6 +14,7 @@ type args struct {\n \tConfig *string\n \tShell *string\n \tPWD *string\n+\tDebug *bool\n }\n \n func main() {\n@@ -42,6 +43,10 @@ func main() {\n \t\t\t\"pwd\",\n \t\t\t\"\",\n \t\t\t\"the path you are working in\"),\n+\t\tDebug: flag.Bool(\n+\t\t\t\"debug\",\n+\t\t\tfalse,\n+\t\t\t\"Print debug information\"),\n \t}\n \tflag.Parse()\n \tenv := &environment{\ndiff --git a/packages/powershell/oh-my-posh/oh-my-posh.psm1 b/packages/powershell/oh-my-posh/oh-my-posh.psm1\nindex 9234fc6..1450eb3 100644\n--- a/packages/powershell/oh-my-posh/oh-my-posh.psm1\n+++ b/packages/powershell/oh-my-posh/oh-my-posh.psm1\n@@ -5,6 +5,7 @@\n \n $global:PoshSettings = New-Object -TypeName PSObject -Property @{\n Theme = \"$PSScriptRoot\\themes\\jandedobbeleer.json\";\n+ ShowDebug = $false\n }\n \n function Get-PoshCommand {\n@@ -36,9 +37,14 @@ function Set-PoshPrompt {\n param(\n [Parameter(Mandatory = $false)]\n [string]\n- $Theme\n+ $Theme,\n+ [Parameter(Mandatory = $false)]\n+ [bool]\n+ $ShowDebug = $false\n )\n \n+ $global:PoshSettings.ShowDebug = $ShowDebug\n+\n if (Test-Path \"$PSScriptRoot/themes/$Theme.json\") {\n $global:PoshSettings.Theme = \"$PSScriptRoot/themes/$Theme.json\"\n }\n@@ -68,8 +74,9 @@ function Set-PoshPrompt {\n $startInfo = New-Object System.Diagnostics.ProcessStartInfo\n $startInfo.FileName = Get-PoshCommand\n $config = $global:PoshSettings.Theme\n+ $showDebug = $global:PoshSettings.ShowDebug\n $cleanPWD = $PWD.ProviderPath.TrimEnd(\"\\\")\n- $startInfo.Arguments = \"-config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n+ $startInfo.Arguments = \"-debug=\"\"$showDebug\"\" -config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n $startInfo.Environment[\"TERM\"] = \"xterm-256color\"\n $startInfo.CreateNoWindow = $true\n $startInfo.StandardOutputEncoding = [System.Text.Encoding]::UTF8\ndiff --git a/segment.go b/segment.go\nindex 27dd416..4015dac 100644\n--- a/segment.go\n+++ b/segment.go\n@@ -1,6 +1,9 @@\n package main\n \n-import \"errors\"\n+import (\n+\t\"errors\"\n+\t\"time\"\n+)\n \n // Segment represent a single segment and it's configuration\n type Segment struct {\n@@ -17,6 +20,7 @@ type Segment struct {\n \twriter SegmentWriter\n \tstringValue string\n \tactive bool\n+\ttiming time.Duration\n }\n \n // 
SegmentWriter is the interface used to define what and if to write to the prompt\n@@ -149,3 +153,26 @@ func (segment *Segment) mapSegmentWithWriter(env environmentInfo) error {\n \t}\n \treturn errors.New(\"unable to map writer\")\n }\n+\n+func (segment *Segment) setStringValue(env environmentInfo, cwd string, debug bool) {\n+\terr := segment.mapSegmentWithWriter(env)\n+\tif err != nil || segment.hasValue(IgnoreFolders, cwd) {\n+\t\treturn\n+\t}\n+\t// add timing only in debug\n+\tif debug {\n+\t\tstart := time.Now()\n+\t\tdefer (func() {\n+\t\t\t// force segment rendering to display the time it took\n+\t\t\t// to check if the segment is enabled or not\n+\t\t\t// depending on the segement, calling enabled()\n+\t\t\t// can be time consuming\n+\t\t\tsegment.active = true\n+\t\t\telapsed := time.Since(start)\n+\t\t\tsegment.timing = elapsed\n+\t\t})()\n+\t}\n+\tif segment.enabled() {\n+\t\tsegment.stringValue = segment.string()\n+\t}\n+}\n", "diff --git a/ibis/backends/clickhouse/tests/test_client.py b/ibis/backends/clickhouse/tests/test_client.py\nindex 678683d..c4e2aec 100644\n--- a/ibis/backends/clickhouse/tests/test_client.py\n+++ b/ibis/backends/clickhouse/tests/test_client.py\n@@ -224,6 +224,21 @@ def test_create_table_data(con, data, engine, temp_table):\n assert len(t.execute()) == 3\n \n \n+def test_create_table_with_properties(con, temp_table):\n+ data = pd.DataFrame({\"a\": list(\"abcde\" * 20), \"b\": [1, 2, 3, 4, 5] * 20})\n+ n = len(data)\n+ t = con.create_table(\n+ temp_table,\n+ data,\n+ schema=ibis.schema(dict(a=\"string\", b=\"!uint32\")),\n+ order_by=[\"a\", \"b\"],\n+ partition_by=[\"a\"],\n+ sample_by=[\"b\"],\n+ settings={\"allow_nullable_key\": \"1\"},\n+ )\n+ assert t.count().execute() == n\n+\n+\n @pytest.mark.parametrize(\n \"engine\",\n [\n", "diff --git a/Jenkinsfile b/Jenkinsfile\nindex 168f446..a4da961 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -28,6 +28,7 @@ pipeline {\n }\n \n stage('Verify') {\n+ failFast true\n parallel {\n stage('Tests') {\n steps {\n", "diff --git a/config/webpack.config.prod.js b/config/webpack.config.prod.js\nindex 8b23fba..58a4c17 100644\n--- a/config/webpack.config.prod.js\n+++ b/config/webpack.config.prod.js\n@@ -251,7 +251,7 @@ module.exports = {\n plugins: [\n argv.notypecheck\n ? 
null\n- : new ForkTsCheckerWebpackPlugin({tslint: true, async: false}),\n+ : new ForkTsCheckerWebpackPlugin({tslint: true}),\n // Makes some environment variables available in index.html.\n // The public URL is available as %PUBLIC_URL% in index.html, e.g.:\n // <link rel=\"shortcut icon\" href=\"%PUBLIC_URL%/favicon.ico\">\n", "diff --git a/src/background/audio-manager.ts b/src/background/audio-manager.ts\nindex 54e8b24..11c5fba 100644\n--- a/src/background/audio-manager.ts\n+++ b/src/background/audio-manager.ts\n@@ -2,7 +2,7 @@\n * To make sure only one audio plays at a time\n */\n \n-import { timeout } from '../_helpers/promise-more'\n+import { timeout } from '@/_helpers/promise-more'\n \n declare global {\n interface Window {\ndiff --git a/src/background/context-menus.ts b/src/background/context-menus.ts\nindex 994b59e..7036362 100644\n--- a/src/background/context-menus.ts\n+++ b/src/background/context-menus.ts\n@@ -1,5 +1,5 @@\n-import { storage, openURL } from '../_helpers/browser-api'\n-import { AppConfig } from '../app-config'\n+import { storage, openURL } from '@/_helpers/browser-api'\n+import { AppConfig } from '@/app-config'\n \n import { Observable } from 'rxjs/Observable'\n import { fromPromise } from 'rxjs/observable/fromPromise'\ndiff --git a/src/background/initialization.ts b/src/background/initialization.ts\nindex 0e5b3ad..001ee73 100644\n--- a/src/background/initialization.ts\n+++ b/src/background/initialization.ts\n@@ -1,6 +1,6 @@\n-import { storage, openURL } from '../_helpers/browser-api'\n-import checkUpdate from '../_helpers/check-update'\n-import { AppConfig } from '../app-config'\n+import { storage, openURL } from '@/_helpers/browser-api'\n+import checkUpdate from '@/_helpers/check-update'\n+import { AppConfig } from '@/app-config'\n import { mergeConfig } from './merge-config'\n import { init as initMenus } from './context-menus'\n import { init as initPdf } from './pdf-sniffer'\ndiff --git a/src/background/merge-config.ts b/src/background/merge-config.ts\nindex afa1800..afdbd63 100644\n--- a/src/background/merge-config.ts\n+++ b/src/background/merge-config.ts\n@@ -1,4 +1,4 @@\n-import { appConfigFactory, AppConfig } from '../app-config'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n import _ from 'lodash'\n \n /**\n@@ -24,7 +24,7 @@ function initConfig (): Promise<AppConfig> {\n const storageObj = { config: appConfigFactory() }\n \n Object.keys(storageObj.config.dicts.all).forEach(id => {\n- storageObj[id] = require('../components/dictionaries/' + id + '/config')\n+ storageObj[id] = require('@/components/dictionaries/' + id + '/config')\n })\n \n return browser.storage.sync.set(storageObj)\n@@ -70,7 +70,7 @@ function mergeHistorical (config): Promise<AppConfig> {\n \n const storageObj = { config: base }\n Object.keys(base.dicts.all).forEach(id => {\n- storageObj[id] = config.dicts.all[id] || require('../components/dictionaries/' + id + '/config')\n+ storageObj[id] = config.dicts.all[id] || require('@/components/dictionaries/' + id + '/config')\n })\n \n return browser.storage.sync.set(storageObj)\ndiff --git a/src/background/pdf-sniffer.ts b/src/background/pdf-sniffer.ts\nindex 6ba27cf..70aa38f 100644\n--- a/src/background/pdf-sniffer.ts\n+++ b/src/background/pdf-sniffer.ts\n@@ -2,8 +2,8 @@\n * Open pdf link directly\n */\n \n-import { storage } from '../_helpers/browser-api'\n-import { AppConfig } from '../app-config'\n+import { storage } from '@/_helpers/browser-api'\n+import { AppConfig } from '@/app-config'\n \n export function init (pdfSniff: 
boolean) {\n if (browser.webRequest.onBeforeRequest.hasListener(otherPdfListener)) {\ndiff --git a/src/background/server.ts b/src/background/server.ts\nindex 73b34b6..66ed5c0 100644\n--- a/src/background/server.ts\n+++ b/src/background/server.ts\n@@ -1,7 +1,7 @@\n-import { DictID } from '../app-config'\n-import { message, openURL } from '../_helpers/browser-api'\n+import { DictID } from '@/app-config'\n+import { message, openURL } from '@/_helpers/browser-api'\n import { play } from './audio-manager'\n-import { chsToChz } from '../_helpers/chs-to-chz'\n+import { chsToChz } from '@/_helpers/chs-to-chz'\n \n interface MessageOpenUrlWithEscape {\n type: 'OPEN_URL'\n@@ -63,7 +63,7 @@ function fetchDictResult (data: MessageFetchDictResult): Promise<void> {\n let search\n \n try {\n- search = require('../components/dictionaries/' + data.dict + '/engine.js')\n+ search = require('@/components/dictionaries/' + data.dict + '/engine.js')\n } catch (err) {\n return Promise.reject(err)\n }\ndiff --git a/test/unit/_helpers/browser-api.spec.ts b/test/unit/_helpers/browser-api.spec.ts\nindex 1f39145..e327169 100644\n--- a/test/unit/_helpers/browser-api.spec.ts\n+++ b/test/unit/_helpers/browser-api.spec.ts\n@@ -1,4 +1,4 @@\n-import { message, storage, openURL } from '../../../src/_helpers/browser-api'\n+import { message, storage, openURL } from '@/_helpers/browser-api'\n \n beforeEach(() => {\n browser.flush()\ndiff --git a/test/unit/_helpers/check-update.spec.ts b/test/unit/_helpers/check-update.spec.ts\nindex 2abfc57..fd0b678 100644\n--- a/test/unit/_helpers/check-update.spec.ts\n+++ b/test/unit/_helpers/check-update.spec.ts\n@@ -1,4 +1,4 @@\n-import checkUpdate from '../../../src/_helpers/check-update'\n+import checkUpdate from '@/_helpers/check-update'\n import fetchMock from 'jest-fetch-mock'\n \n describe('Check Update', () => {\ndiff --git a/test/unit/_helpers/chs-to-chz.spec.ts b/test/unit/_helpers/chs-to-chz.spec.ts\nindex 295c6ad..21d5229 100644\n--- a/test/unit/_helpers/chs-to-chz.spec.ts\n+++ b/test/unit/_helpers/chs-to-chz.spec.ts\n@@ -1,4 +1,4 @@\n-import chsToChz from '../../../src/_helpers/chs-to-chz'\n+import chsToChz from '@/_helpers/chs-to-chz'\n \n describe('Chs to Chz', () => {\n it('should convert chs to chz', () => {\ndiff --git a/test/unit/_helpers/fetch-dom.spec.ts b/test/unit/_helpers/fetch-dom.spec.ts\nindex a79dda0..bbfbf10 100644\n--- a/test/unit/_helpers/fetch-dom.spec.ts\n+++ b/test/unit/_helpers/fetch-dom.spec.ts\n@@ -1,4 +1,4 @@\n-import fetchDom from '../../../src/_helpers/fetch-dom'\n+import fetchDom from '@/_helpers/fetch-dom'\n \n class XMLHttpRequestMock {\n static queue: XMLHttpRequestMock[] = []\ndiff --git a/test/unit/_helpers/lang-check.spec.ts b/test/unit/_helpers/lang-check.spec.ts\nindex f3e668a..09f30bb 100644\n--- a/test/unit/_helpers/lang-check.spec.ts\n+++ b/test/unit/_helpers/lang-check.spec.ts\n@@ -1,4 +1,4 @@\n-import { isContainChinese, isContainEnglish } from '../../../src/_helpers/lang-check'\n+import { isContainChinese, isContainEnglish } from '@/_helpers/lang-check'\n \n describe('Language Check', () => {\n it('isContainChinese should return ture if text contains Chinese', () => {\ndiff --git a/test/unit/_helpers/promise-more.spec.ts b/test/unit/_helpers/promise-more.spec.ts\nindex 9601c7d..66dc8d9 100644\n--- a/test/unit/_helpers/promise-more.spec.ts\n+++ b/test/unit/_helpers/promise-more.spec.ts\n@@ -1,4 +1,4 @@\n-import * as pm from '../../../src/_helpers/promise-more'\n+import * as pm from '@/_helpers/promise-more'\n \n describe('Promise More', 
() => {\n beforeAll(() => {\ndiff --git a/test/unit/_helpers/selection.spec.ts b/test/unit/_helpers/selection.spec.ts\nindex 370239a..06812cf 100644\n--- a/test/unit/_helpers/selection.spec.ts\n+++ b/test/unit/_helpers/selection.spec.ts\n@@ -1,4 +1,4 @@\n-import selection from '../../../src/_helpers/selection'\n+import selection from '@/_helpers/selection'\n \n describe('Selection', () => {\n const bakSelection = window.getSelection\ndiff --git a/test/unit/_helpers/strip-script.spec.ts b/test/unit/_helpers/strip-script.spec.ts\nindex cce558f..355b382 100644\n--- a/test/unit/_helpers/strip-script.spec.ts\n+++ b/test/unit/_helpers/strip-script.spec.ts\n@@ -1,4 +1,4 @@\n-import stripScript from '../../../src/_helpers/strip-script'\n+import stripScript from '@/_helpers/strip-script'\n \n describe('Strip Script', () => {\n const expectedEl = document.createElement('div') as HTMLDivElement\ndiff --git a/test/unit/background/audio-manager.spec.ts b/test/unit/background/audio-manager.spec.ts\nindex b0096a6..b1266d7 100644\n--- a/test/unit/background/audio-manager.spec.ts\n+++ b/test/unit/background/audio-manager.spec.ts\n@@ -1,4 +1,4 @@\n-import audio from '../../../src/background/audio-manager'\n+import audio from '@/background/audio-manager'\n \n describe('Audio Manager', () => {\n const bakAudio = (window as any).Audio\ndiff --git a/test/unit/background/context-menus.spec.ts b/test/unit/background/context-menus.spec.ts\nindex 39e249c..d9049dc 100644\n--- a/test/unit/background/context-menus.spec.ts\n+++ b/test/unit/background/context-menus.spec.ts\n@@ -1,4 +1,4 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n import sinon from 'sinon'\n \n function specialConfig () {\n@@ -11,7 +11,7 @@ describe('Context Menus', () => {\n beforeAll(() => {\n browser.flush()\n jest.resetModules()\n- require('../../../src/background/context-menus')\n+ require('@/background/context-menus')\n })\n afterAll(() => browser.flush())\n \n@@ -93,7 +93,7 @@ describe('Context Menus', () => {\n browser.contextMenus.create.callsFake((_, cb) => cb())\n config = specialConfig()\n jest.resetModules()\n- const { init } = require('../../../src/background/context-menus')\n+ const { init } = require('@/background/context-menus')\n init(config.contextMenus)\n })\n \n@@ -110,7 +110,7 @@ describe('Context Menus', () => {\n it('should not init setup when called multiple times', () => {\n expect(browser.contextMenus.removeAll.calledOnce).toBeTruthy()\n \n- const { init } = require('../../../src/background/context-menus')\n+ const { init } = require('@/background/context-menus')\n init(config.contextMenus)\n init(config.contextMenus)\n \ndiff --git a/test/unit/background/initialization.spec.ts b/test/unit/background/initialization.spec.ts\nindex 7bc0972..56a6389 100644\n--- a/test/unit/background/initialization.spec.ts\n+++ b/test/unit/background/initialization.spec.ts\n@@ -1,4 +1,4 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n import fetchMock from 'jest-fetch-mock'\n import sinon from 'sinon'\n \n@@ -11,12 +11,12 @@ describe('Initialization', () => {\n const checkUpdate = jest.fn().mockReturnValue(Promise.resolve())\n \n beforeAll(() => {\n- const { message, storage } = require('../../../src/_helpers/browser-api')\n+ const { message, storage } = require('@/_helpers/browser-api')\n window.fetch = fetchMock\n \n browser.flush()\n jest.resetModules()\n- 
jest.doMock('../../../src/background/merge-config', () => {\n+ jest.doMock('@/background/merge-config', () => {\n return {\n mergeConfig (config) {\n mergeConfig(config)\n@@ -24,16 +24,16 @@ describe('Initialization', () => {\n }\n }\n })\n- jest.doMock('../../../src/background/context-menus', () => {\n+ jest.doMock('@/background/context-menus', () => {\n return { init: initMenus }\n })\n- jest.doMock('../../../src/background/pdf-sniffer', () => {\n+ jest.doMock('@/background/pdf-sniffer', () => {\n return { init: initPdf }\n })\n- jest.doMock('../../../src/_helpers/check-update', () => {\n+ jest.doMock('@/_helpers/check-update', () => {\n return checkUpdate\n })\n- jest.doMock('../../../src/_helpers/browser-api', () => {\n+ jest.doMock('@/_helpers/browser-api', () => {\n return {\n message,\n storage,\n@@ -41,13 +41,13 @@ describe('Initialization', () => {\n }\n })\n \n- require('../../../src/background/initialization')\n+ require('@/background/initialization')\n })\n afterAll(() => {\n browser.flush()\n- jest.dontMock('../../../src/background/merge-config')\n- jest.dontMock('../../../src/background/context-menus')\n- jest.dontMock('../../../src/_helpers/browser-api')\n+ jest.dontMock('@/background/merge-config')\n+ jest.dontMock('@/background/context-menus')\n+ jest.dontMock('@/_helpers/browser-api')\n window.fetch = bakFetch\n })\n \ndiff --git a/test/unit/background/merge-config.spec.ts b/test/unit/background/merge-config.spec.ts\nindex 73c047d..c0dce26 100644\n--- a/test/unit/background/merge-config.spec.ts\n+++ b/test/unit/background/merge-config.spec.ts\n@@ -1,5 +1,5 @@\n-import { appConfigFactory, AppConfig, AppConfigMutable } from '../../../src/app-config'\n-import mergeConfig from '../../../src/background/merge-config'\n+import { appConfigFactory, AppConfig, AppConfigMutable } from '@/app-config'\n+import mergeConfig from '@/background/merge-config'\n import sinon from 'sinon'\n \n describe('Merge Config', () => {\ndiff --git a/test/unit/background/pdf-sniffer.spec.ts b/test/unit/background/pdf-sniffer.spec.ts\nindex a0219d2..bb7726f 100644\n--- a/test/unit/background/pdf-sniffer.spec.ts\n+++ b/test/unit/background/pdf-sniffer.spec.ts\n@@ -1,5 +1,5 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n-import { init as initPdf } from '../../../src/background/pdf-sniffer'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n+import { init as initPdf } from '@/background/pdf-sniffer'\n import sinon from 'sinon'\n \n function hasListenerPatch (fn) {\ndiff --git a/test/unit/background/server.spec.ts b/test/unit/background/server.spec.ts\nindex b8ef065..aa04525 100644\n--- a/test/unit/background/server.spec.ts\n+++ b/test/unit/background/server.spec.ts\n@@ -1,5 +1,5 @@\n-import { appConfigFactory, AppConfig } from '../../../src/app-config'\n-import * as browserWrap from '../../../src/_helpers/browser-api'\n+import { appConfigFactory, AppConfig } from '@/app-config'\n+import * as browserWrap from '@/_helpers/browser-api'\n import sinon from 'sinon'\n \n describe('Server', () => {\n@@ -13,26 +13,26 @@ describe('Server', () => {\n browserWrap.openURL = openURL\n \n beforeAll(() => {\n- jest.doMock('../../../src/_helpers/chs-to-chz', () => {\n+ jest.doMock('@/_helpers/chs-to-chz', () => {\n return { chsToChz }\n })\n- jest.doMock('../../../src/background/audio-manager', () => {\n+ jest.doMock('@/background/audio-manager', () => {\n return { play }\n })\n- jest.doMock('../../../src/_helpers/browser-api', () => {\n+ jest.doMock('@/_helpers/browser-api', () => 
{\n return browserWrap\n })\n- jest.doMock('../../../src/components/dictionaries/bing/engine.js', () => {\n+ jest.doMock('@/components/dictionaries/bing/engine.js', () => {\n return bingSearch\n })\n })\n \n afterAll(() => {\n browser.flush()\n- jest.dontMock('../../../src/_helpers/chs-to-chz')\n- jest.dontMock('../../../src/background/audio-manager')\n- jest.dontMock('../../../src/_helpers/browser-api')\n- jest.dontMock('../../../src/components/dictionaries/bing/engine.js')\n+ jest.dontMock('@/_helpers/chs-to-chz')\n+ jest.dontMock('@/background/audio-manager')\n+ jest.dontMock('@/_helpers/browser-api')\n+ jest.dontMock('@/components/dictionaries/bing/engine.js')\n })\n \n beforeEach(() => {\n@@ -46,7 +46,7 @@ describe('Server', () => {\n bingSearch.mockReset()\n bingSearch.mockImplementation(() => Promise.resolve())\n jest.resetModules()\n- require('../../../src/background/server')\n+ require('@/background/server')\n })\n \n it('should properly init', () => {\n"]
5
["bea32587586ca08f390c901a95e9b9c25263f4df", "7e1ece7d3fd41d1e3ee38e479c119494bb269966", "28e623b294816c4e070971782a75c8697a11966f", "411be831591b2ea15ca9138eaf8db81f51b5101e", "8246d024f21d93cc092e19bede5f7b3a5325c8dc"]
["feat", "test", "ci", "build", "refactor"]
remove broken link Fixes #1785,implement array flatten support,reorder startup steps,permission check,add prewatch script to core
["diff --git a/docs/content/Caching/Caching.md b/docs/content/Caching/Caching.md\nindex d873a52..9706dda 100644\n--- a/docs/content/Caching/Caching.md\n+++ b/docs/content/Caching/Caching.md\n@@ -135,8 +135,9 @@ If nothing is found in the cache, the query is executed in the database and the \n is returned as well as updating the cache.\n \n If an existing value is present in the cache and the `refreshKey` value for\n-the query hasn't changed, the cached value will be returned. Otherwise, a\n-[query renewal](#in-memory-cache-force-query-renewal) will be performed.\n+the query hasn't changed, the cached value will be returned. Otherwise, a SQL query will be executed either against the pre-aggregations storage or the source database to populate the cache with the results and return them.\n+\n+\n \n ### Refresh Keys\n \n", "diff --git a/ibis/backends/snowflake/registry.py b/ibis/backends/snowflake/registry.py\nindex 2373dd7..4ce03b0 100644\n--- a/ibis/backends/snowflake/registry.py\n+++ b/ibis/backends/snowflake/registry.py\n@@ -422,6 +422,7 @@ operation_registry.update(\n ops.ArrayZip: _array_zip,\n ops.ArraySort: unary(sa.func.array_sort),\n ops.ArrayRepeat: fixed_arity(sa.func.ibis_udfs.public.array_repeat, 2),\n+ ops.ArrayFlatten: fixed_arity(sa.func.array_flatten, 1),\n ops.StringSplit: fixed_arity(sa.func.split, 2),\n # snowflake typeof only accepts VARIANT, so we cast\n ops.TypeOf: unary(lambda arg: sa.func.typeof(sa.func.to_variant(arg))),\n", "diff --git a/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java b/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\nindex 52fa3a9..d81c27a 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\n@@ -50,21 +50,20 @@ public final class BrokerStartupProcess {\n // must be executed before any disk space usage listeners are registered\n result.add(new DiskSpaceUsageMonitorStep());\n }\n-\n result.add(new MonitoringServerStep());\n result.add(new BrokerAdminServiceStep());\n+\n result.add(new ClusterServicesCreationStep());\n+ result.add(new ClusterServicesStep());\n \n result.add(new CommandApiServiceStep());\n result.add(new SubscriptionApiStep());\n-\n- result.add(new ClusterServicesStep());\n+ result.add(new LeaderManagementRequestHandlerStep());\n \n if (config.getGateway().isEnable()) {\n result.add(new EmbeddedGatewayServiceStep());\n }\n \n- result.add(new LeaderManagementRequestHandlerStep());\n result.add(new PartitionManagerStep());\n \n return result;\n", "diff --git a/server/src/routes/course/index.ts b/server/src/routes/course/index.ts\nindex 557f5fb..bc0e490 100644\n--- a/server/src/routes/course/index.ts\n+++ b/server/src/routes/course/index.ts\n@@ -209,7 +209,7 @@ function addStudentApi(router: Router, logger: ILogger) {\n router.post('/student/:githubId/status', ...mentorValidators, updateStudentStatus(logger));\n router.post('/student/:githubId/status-self', courseGuard, selfUpdateStudentStatus(logger));\n router.get('/student/:githubId/score', courseGuard, getScoreByStudent(logger));\n- router.post('/student/:githubId/certificate', courseManagerGuard, ...validators, postStudentCertificate(logger));\n+ router.post('/student/:githubId/certificate', courseManagerGuard, validateGithubId, postStudentCertificate(logger));\n \n router.get('/students', courseSupervisorGuard, getStudents(logger));\n router.get('/students/csv', courseSupervisorGuard, 
getStudentsCsv(logger));\n", "diff --git a/.gitignore b/.gitignore\nindex 3445558..72257cb 100644\n--- a/.gitignore\n+++ b/.gitignore\n@@ -1,4 +1,5 @@\n *.o\n+.env\n settheory\n constraint\n Main\ndiff --git a/packages/core/package.json b/packages/core/package.json\nindex 9ba8b93..79bd532 100644\n--- a/packages/core/package.json\n+++ b/packages/core/package.json\n@@ -14,6 +14,7 @@\n \"build:parsers\": \"nearleyc src/parser/Domain.ne > src/parser/DomainParser.ts && nearleyc src/parser/Substance.ne > src/parser/SubstanceParser.ts && nearleyc src/parser/Style.ne > src/parser/StyleParser.ts\",\n \"prebuild\": \"yarn build:parsers\",\n \"prestart\": \"yarn build:parsers\",\n+ \"prewatch\": \"yarn build:parsers\",\n \"test\": \"jest --watchAll=false\",\n \"test:watch\": \"jest --watchAll\",\n \"build\": \"rollup -c\",\n"]
5
["c351088bce98594c740a39546ce3655c91554a5d", "d3c754f09502be979e5dcc79f968b15052590bd0", "3e0c4cbf91fe5efc9b93baba93e4df93ef4ab5cd", "33c25b2f59c931a7f4af994365522221a7821dca", "aa0152baa4376b1087c86499a7c289b668d5ad55"]
["docs", "feat", "refactor", "fix", "build"]
auto focus inputs in survey form,selenium java 4.8.1,path correction Signed-off-by: Pranav C <[email protected]>,Template using kube api version Signed-off-by: rjshrjndrn <[email protected]>,common routine for browser timezone Signed-off-by: Raju Udava <[email protected]>
["diff --git a/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue b/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\nindex b2a90d8..dbad824 100644\n--- a/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\n+++ b/packages/nc-gui/pages/[projectType]/form/[viewId]/index/survey.vue\n@@ -6,6 +6,7 @@ import {\n DropZoneRef,\n computed,\n onKeyStroke,\n+ onMounted,\n provide,\n ref,\n useEventListener,\n@@ -85,6 +86,8 @@ function transition(direction: TransitionDirection) {\n \n setTimeout(() => {\n isTransitioning.value = false\n+\n+ setTimeout(focusInput, 100)\n }, 1000)\n }\n \n@@ -113,6 +116,19 @@ async function goPrevious() {\n goToPrevious()\n }\n \n+function focusInput() {\n+ if (document && typeof document !== 'undefined') {\n+ const inputEl =\n+ (document.querySelector('.nc-cell input') as HTMLInputElement) ||\n+ (document.querySelector('.nc-cell textarea') as HTMLTextAreaElement)\n+\n+ if (inputEl) {\n+ inputEl.select()\n+ inputEl.focus()\n+ }\n+ }\n+}\n+\n useEventListener('wheel', (event) => {\n if (Math.abs(event.deltaX) < Math.abs(event.deltaY)) {\n // Scrolling more vertically than horizontally\n@@ -130,6 +146,8 @@ useEventListener('wheel', (event) => {\n \n onKeyStroke(['ArrowLeft', 'ArrowDown'], goPrevious)\n onKeyStroke(['ArrowRight', 'ArrowUp', 'Enter', 'Space'], goNext)\n+\n+onMounted(focusInput)\n </script>\n \n <template>\n", "diff --git a/pom.xml b/pom.xml\nindex f792f3c..477224a 100644\n--- a/pom.xml\n+++ b/pom.xml\n@@ -60,8 +60,8 @@\n <codehaus-groovy.version>3.0.11</codehaus-groovy.version>\n <jython.version>2.7.0</jython.version>\n <docker-java.version>3.2.14</docker-java.version>\n- <selenium.version>4.8.0</selenium.version>\n- <jmeter-plugins-webdriver.version>4.8.0</jmeter-plugins-webdriver.version>\n+ <selenium.version>4.8.1</selenium.version>\n+ <jmeter-plugins-webdriver.version>4.8.1</jmeter-plugins-webdriver.version>\n <opentelemetry.version>1.22.0</opentelemetry.version>\n <oracle-database.version>19.7.0.0</oracle-database.version>\n <zookeeper.version>3.8.0</zookeeper.version>\n", "diff --git a/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts b/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts\nindex 3afce9b..8425b00 100644\n--- a/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts\n+++ b/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts\n@@ -1,11 +1,9 @@\n import { promises as fs } from 'fs';\n import axios from 'axios';\n+import path from 'path'\n \n const sqliteFilePath = (parallelId: string) => {\n- const rootDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n- '',\n- );\n+ const rootDir = process.cwd()\n \n return `${rootDir}/test_sakila_${parallelId}.db`;\n };\n@@ -78,10 +76,10 @@ const deleteSqliteFileIfExists = async (parallelId: string) => {\n };\n \n const seedSakilaSqliteFile = async (parallelId: string) => {\n- const testsDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n- '/tests',\n- );\n+ const testsDir = path.join(\n+ process.cwd(),\n+ 'tests'\n+ );;\n \n await fs.copyFile(\n `${testsDir}/sqlite-sakila-db/sakila.db`,\ndiff --git a/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts b/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts\nindex 6bcd3f1..e4ed112 100644\n--- a/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts\n+++ 
b/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts\n@@ -1,4 +1,5 @@\n import { promises as fs } from 'fs';\n+import path from 'path';\n import axios from 'axios';\n import { knex } from 'knex';\n import Audit from '../../../models/Audit';\n@@ -85,10 +86,7 @@ const resetSakilaMysql = async (\n parallelId: string,\n isEmptyProject: boolean,\n ) => {\n- const testsDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n- '/tests',\n- );\n+ const testsDir = path.join(process.cwd(), '/tests');\n \n try {\n await knex.raw(`DROP DATABASE test_sakila_${parallelId}`);\ndiff --git a/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts b/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts\nindex 1a042c3..73923ef 100644\n--- a/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts\n+++ b/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts\n@@ -1,6 +1,7 @@\n import { promises as fs } from 'fs';\n import axios from 'axios';\n import { knex } from 'knex';\n+import path from 'path'\n import Audit from '../../../models/Audit';\n import type Project from '../../../models/Project';\n \n@@ -78,8 +79,8 @@ const isSakilaPgToBeReset = async (parallelId: string, project?: Project) => {\n };\n \n const resetSakilaPg = async (parallelId: string, isEmptyProject: boolean) => {\n- const testsDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n+ const testsDir = path.join(\n+ process.cwd(),\n '/tests',\n );\n \n", "diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml\nindex c014f34..2a12e0d 100644\n--- a/.github/workflows/api-ee.yaml\n+++ b/.github/workflows/api-ee.yaml\n@@ -8,7 +8,7 @@ on:\n default: 'false'\n push:\n branches:\n- - dev\n+ - test_ci\n paths:\n - ee/api/**\n - api/**\n@@ -112,7 +112,8 @@ jobs:\n # Deploy command\n kubectl config set-context --namespace=app --current\n kubectl config get-contexts\n- helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -f -\n+ k_version=$(kubectl version --short 2>/dev/null | awk '/Server/{print $NF}')\n+ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -\n env:\n DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}\n # We're not passing -ee flag, because helm will add that.\n", "diff --git a/tests/playwright/tests/db/timezone.spec.ts b/tests/playwright/tests/db/timezone.spec.ts\nindex c966c2b..a30c7e4 100644\n--- a/tests/playwright/tests/db/timezone.spec.ts\n+++ b/tests/playwright/tests/db/timezone.spec.ts\n@@ -6,6 +6,7 @@ import { Api, UITypes } from 'nocodb-sdk';\n import { ProjectsPage } from '../../pages/ProjectsPage';\n import { isMysql, isPg, isSqlite } from '../../setup/db';\n import { getKnexConfig } from '../utils/config';\n+import { getBrowserTimezoneOffset } from '../utils/general';\n let api: Api<any>, records: any[];\n \n const columns = [\n@@ -680,11 +681,7 @@ test.describe.serial('External DB - DateTime column', async () => {\n await dashboard.rootPage.waitForTimeout(2000);\n \n // get timezone offset\n- const timezoneOffset = new Date().getTimezoneOffset();\n- const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n- const minutes = Math.abs(timezoneOffset % 60);\n- const sign = timezoneOffset <= 0 ? 
'+' : '-';\n- const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ const formattedOffset = getBrowserTimezoneOffset();\n \n await dashboard.treeView.openBase({ title: 'datetimetable' });\n await dashboard.treeView.openTable({ title: 'MyTable' });\n@@ -844,11 +841,7 @@ test.describe('Ext DB MySQL : DB Timezone configured as HKT', () => {\n }\n \n // get timezone offset\n- const timezoneOffset = new Date().getTimezoneOffset();\n- const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n- const minutes = Math.abs(timezoneOffset % 60);\n- const sign = timezoneOffset <= 0 ? '+' : '-';\n- const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ const formattedOffset = getBrowserTimezoneOffset();\n \n // connect after timezone is set\n await connectToExtDb(context);\ndiff --git a/tests/playwright/tests/utils/general.ts b/tests/playwright/tests/utils/general.ts\nindex 56a9e1a..45e9c6c 100644\n--- a/tests/playwright/tests/utils/general.ts\n+++ b/tests/playwright/tests/utils/general.ts\n@@ -50,4 +50,14 @@ function getDefaultPwd() {\n return 'Password123.';\n }\n \n-export { getTextExcludeIconText, isSubset, getIconText, getDefaultPwd };\n+function getBrowserTimezoneOffset() {\n+ // get timezone offset\n+ const timezoneOffset = new Date().getTimezoneOffset();\n+ const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n+ const minutes = Math.abs(timezoneOffset % 60);\n+ const sign = timezoneOffset <= 0 ? '+' : '-';\n+ const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ return formattedOffset;\n+}\n+\n+export { getTextExcludeIconText, isSubset, getIconText, getDefaultPwd, getBrowserTimezoneOffset };\n"]
5
["5373c3036866db58b322b424d3be9dedff57a376", "66f907f2d6ff0956bb5215518678bc79cab83c17", "974e033a3ca7484290a04201ee33856a25da0942", "c3531347fe5a4cc82d426db195026a5bdad15e7a", "7d3e9b3a98b02f6cb1f3444dc7e3a0459aeb26a7"]
["feat", "build", "fix", "ci", "test"]
support multi deploy in ReplayStateTest In order to support cases for call-activity, the ReplayStateTest needs to be able to deploy multiple processes.,move group logical op outside Signed-off-by: Pranav C <[email protected]>,document the use of export buckets for large pre-aggregations Co-authored-by: Ray Paik <[email protected]> Co-authored-by: Artyom Keydunov <[email protected]> Co-authored-by: Dmitry Patsura <[email protected]>,fix build,apply element migrated events This is a very straightforward event applier. All it needs to do is update the persisted data for the element instance using the data in the event.
["diff --git a/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java b/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java\nindex 77e320f..0389291 100644\n--- a/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processing/streamprocessor/ReplayStateTest.java\n@@ -22,9 +22,9 @@ import io.zeebe.protocol.record.intent.ProcessInstanceIntent;\n import io.zeebe.protocol.record.value.BpmnElementType;\n import io.zeebe.test.util.record.RecordingExporter;\n import java.time.Duration;\n+import java.util.ArrayList;\n import java.util.Collection;\n import java.util.List;\n-import java.util.Optional;\n import java.util.function.Function;\n import org.assertj.core.api.SoftAssertions;\n import org.awaitility.Awaitility;\n@@ -170,7 +170,7 @@ public final class ReplayStateTest {\n @Test\n public void shouldRestoreState() {\n // given\n- testCase.process.ifPresent(process -> engine.deployment().withXmlResource(process).deploy());\n+ testCase.processes.forEach(process -> engine.deployment().withXmlResource(process).deploy());\n \n final Record<?> finalRecord = testCase.execution.apply(engine);\n \n@@ -227,7 +227,7 @@ public final class ReplayStateTest {\n \n private static final class TestCase {\n private final String description;\n- private Optional<BpmnModelInstance> process = Optional.empty();\n+ private final List<BpmnModelInstance> processes = new ArrayList<>();\n private Function<EngineRule, Record<?>> execution =\n engine -> RecordingExporter.records().getFirst();\n \n@@ -236,7 +236,7 @@ public final class ReplayStateTest {\n }\n \n private TestCase withProcess(final BpmnModelInstance process) {\n- this.process = Optional.of(process);\n+ processes.add(process);\n return this;\n }\n \n", "diff --git a/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue b/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\nindex 5138589..f756981 100644\n--- a/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/components/ColumnFilter.vue\n@@ -2,40 +2,46 @@\n <div\n class=\"backgroundColor pa-2 menu-filter-dropdown\"\n :class=\"{ nested }\"\n- :style=\"{ width: nested ? '100%' : '530px' }\"\n+ :style=\"{ width: nested ? 
'100%' : '630px' }\"\n >\n <div class=\"grid\" @click.stop>\n <template v-for=\"(filter, i) in filters\" dense>\n <template v-if=\"filter.status !== 'delete'\">\n- <div v-if=\"filter.is_group\" :key=\"i\" style=\"grid-column: span 5; padding: 6px\" class=\"elevation-4\">\n- <div class=\"d-flex\" style=\"gap: 6px; padding: 0 6px\">\n- <v-icon\n- v-if=\"!filter.readOnly\"\n- small\n- class=\"nc-filter-item-remove-btn\"\n- @click.stop=\"deleteFilter(filter, i)\"\n- >\n- mdi-close-box\n- </v-icon>\n- <span v-if=\"!i\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n- <v-select\n- v-else\n- v-model=\"filter.logical_op\"\n- class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n- :items=\"['and', 'or']\"\n- solo\n- flat\n- dense\n- hide-details\n- placeholder=\"Group op\"\n- @click.stop\n- @change=\"saveOrUpdate(filter, i)\"\n- >\n- <template #item=\"{ item }\">\n- <span class=\"caption font-weight-regular\">{{ item }}</span>\n- </template>\n- </v-select>\n- </div>\n+ <template v-if=\"filter.is_group\">\n+ <v-icon\n+ v-if=\"!filter.readOnly\"\n+ small\n+ class=\"nc-filter-item-remove-btn\"\n+ @click.stop=\"deleteFilter(filter, i)\"\n+ :key=\"i + '_1'\"\n+ >\n+ mdi-close-box\n+ </v-icon>\n+ <span v-else :key=\"i + '_1'\" />\n+\n+ <span :key=\"i + '_2'\" v-if=\"!i\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n+ <v-select\n+ v-else\n+ :key=\"i + '_2'\"\n+ v-model=\"filter.logical_op\"\n+ class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n+ :items=\"['and', 'or']\"\n+ solo\n+ flat\n+ dense\n+ hide-details\n+ placeholder=\"Group op\"\n+ @click.stop\n+ @change=\"saveOrUpdate(filter, i)\"\n+ >\n+ <template #item=\"{ item }\">\n+ <span class=\"caption font-weight-regular\">{{ item }}</span>\n+ </template>\n+ </v-select>\n+ <span :key=\"i + '_3'\" style=\"grid-column: span 3\"></span>\n+ </template>\n+\n+ <div v-if=\"filter.is_group\" :key=\"i + '_4'\" style=\"grid-column: span 5; padding: 6px\" class=\"elevation-4\">\n <column-filter\n v-if=\"filter.id || shared\"\n ref=\"nestedFilter\"\n@@ -54,19 +60,19 @@\n <template v-else>\n <v-icon\n v-if=\"!filter.readOnly\"\n- :key=\"i + '_1'\"\n+ :key=\"i + '_5'\"\n small\n class=\"nc-filter-item-remove-btn\"\n @click.stop=\"deleteFilter(filter, i)\"\n >\n mdi-close-box\n </v-icon>\n- <span v-else :key=\"i + '_1'\" />\n- <span v-if=\"!i\" :key=\"i + '_2'\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n+ <span v-else :key=\"i + '_5'\" />\n+ <span v-if=\"!i\" :key=\"i + '_6'\" class=\"caption d-flex align-center\">{{ $t('labels.where') }}</span>\n \n <v-select\n v-else\n- :key=\"i + '_2'\"\n+ :key=\"i + '_6'\"\n v-model=\"filter.logical_op\"\n class=\"flex-shrink-1 flex-grow-0 elevation-0 caption\"\n :items=\"['and', 'or']\"\n@@ -84,7 +90,7 @@\n </v-select>\n \n <field-list-auto-complete-dropdown\n- :key=\"i + '_3'\"\n+ :key=\"i + '_7'\"\n v-model=\"filter.fk_column_id\"\n class=\"caption nc-filter-field-select\"\n :columns=\"columns\"\n@@ -94,7 +100,7 @@\n />\n \n <v-select\n- :key=\"i + '_4'\"\n+ :key=\"i + '_8'\"\n v-model=\"filter.comparison_op\"\n class=\"flex-shrink-1 flex-grow-0 caption nc-filter-operation-select\"\n :items=\"filterComparisonOp(filter)\"\n@@ -114,11 +120,11 @@\n <span class=\"caption font-weight-regular\">{{ item.text }}</span>\n </template>\n </v-select>\n- <span v-else :key=\"i + '_4'\"></span>\n+ <span v-else :key=\"i + '_8'\"></span>\n <span v-if=\"['null', 'notnull', 'empty', 'notempty'].includes(filter.comparison_op)\" :key=\"i + '_5'\" />\n 
<v-checkbox\n v-else-if=\"types[filter.field] === 'boolean'\"\n- :key=\"i + '_5'\"\n+ :key=\"i + '_9'\"\n v-model=\"filter.value\"\n dense\n :disabled=\"filter.readOnly\"\n@@ -126,7 +132,7 @@\n />\n <v-text-field\n v-else-if=\"filter && filter.fk_column_id\"\n- :key=\"i + '_5'\"\n+ :key=\"i + '_9'\"\n v-model=\"filter.value\"\n solo\n flat\n@@ -137,7 +143,7 @@\n @click.stop\n @input=\"saveOrUpdate(filter, i)\"\n />\n- <span v-else :key=\"i + '_5'\"></span>\n+ <span v-else :key=\"i + '_9'\"></span>\n </template>\n </template>\n </template>\n@@ -411,6 +417,7 @@ export default {\n parentId: this.parentId,\n is_group: true,\n status: 'update',\n+ logical_op: 'and',\n });\n this.filters = this.filters.slice();\n const index = this.filters.length - 1;\n@@ -478,4 +485,8 @@ export default {\n column-gap: 6px;\n row-gap: 6px;\n }\n+\n+.nc-filter-value-select {\n+ min-width: 100px;\n+}\n </style>\n", "diff --git a/docs/content/Caching/Using-Pre-Aggregations.md b/docs/content/Caching/Using-Pre-Aggregations.md\nindex 7882a25..a927241 100644\n--- a/docs/content/Caching/Using-Pre-Aggregations.md\n+++ b/docs/content/Caching/Using-Pre-Aggregations.md\n@@ -65,8 +65,8 @@ In development mode, Cube.js enables background refresh by default and will\n refresh all pre-aggregations marked with the\n [`scheduledRefresh`](/pre-aggregations#scheduled-refresh) parameter.\n \n-Please consult the [Production Checklist][ref-production-checklist-refresh] for\n-best practices on running background refresh in production environments.\n+Please consult the [Production Checklist][ref-prod-list-refresh] for best\n+practices on running background refresh in production environments.\n \n ```js\n cube(`Orders`, {\n@@ -193,10 +193,20 @@ CUBEJS_EXT_DB_TYPE=<SUPPORTED_DB_TYPE_HERE>\n \n <!-- prettier-ignore-start -->\n [[warning |]]\n-| Please be aware of the limitations when using internal and external (outside of Cube Store) pre-aggregations.\n+| Please be aware of the limitations when using internal and external (outside\n+| of Cube Store) pre-aggregations.\n <!-- prettier-ignore-end -->\n \n-![](https://raw.githubusercontent.com/cube-js/cube.js/master/docs/content/Caching/pre-aggregations.png)\n+<div\n+ style=\"text-align: center\"\n+>\n+ <img\n+ alt=\"Internal vs External vs External with Cube Store diagram\"\n+ src=\"https://raw.githubusercontent.com/cube-js/cube.js/master/docs/content/Caching/pre-aggregations.png\"\n+ style=\"border: none\"\n+ width=\"100%\"\n+ />\n+</div>\n \n #### Some known limitations when using Postgres/MySQL as a storage layer listed below.\n \n@@ -245,15 +255,75 @@ slow to return results.\n (such as AWS Athena and BigQuery). Repeatedly querying for this data can easily\n rack up costs.\n \n+## Optimizing Pre-Aggregation Build Times\n+\n+<!-- prettier-ignore-start -->\n+[[info | ]]\n+| For ideal performance, pre-aggregations should be built using a dedicated\n+| Refresh Worker. [See here for more details][ref-prod-list-refresh].\n+<!-- prettier-ignore-end -->\n+\n+By default, Cube.js will use the source database as a temporary staging area for\n+writing pre-aggregations to determine column types. 
The data is loaded back into\n+memory before writing them to Cube Store (or an external database).\n+\n+![](build-regular.png)\n+\n+If the dataset is large (more than 100k rows), then Cube.js can face issues when\n+the Node runtime runs out of memory.\n+\n+### Batching\n+\n+Batching is a more performant strategy where Cube.js sends compressed CSVs for\n+Cube Store to ingest.\n+\n+![](build-batching.png)\n+\n+The performance scales to the amount of memory available on the Cube.js\n+instance. Support is currently available for:\n+\n+- [AWS Athena][ref-connect-db-athena] (coming soon)\n+- [AWS Redshift][ref-connect-db-redshift]\n+- [BigQuery][ref-connect-db-bigquery]\n+- [MySQL][ref-connect-db-mysql]\n+- [Postgres][ref-connect-db-postgres]\n+\n+### Export bucket\n+\n+When dealing with larger pre-aggregations (more than 100k rows), performance can\n+be significantly improved by using an export bucket. This allows the source\n+database to persist data directly into cloud storage, which is then loaded into\n+Cube Store in parallel:\n+\n+![](build-export-bucket.png)\n+\n+Export buckets are currently supported for the following databases:\n+\n+- [AWS Athena][ref-connect-db-athena] (coming soon)\n+- [AWS Redshift][ref-connect-db-redshift]\n+- [BigQuery][ref-connect-db-bigquery]\n+- [Snowflake][ref-connect-db-snowflake]\n+\n+When using cloud storage, it is important to correctly configure any data\n+retention policies to clean up the data in the export bucket as Cube.js does not\n+currently manage this. For most use-cases, 1 day is sufficient.\n+\n [wiki-partitioning]: https://en.wikipedia.org/wiki/Partition_(database)\n+[ref-config-connect-db]: /connecting-to-the-database\n+[ref-config-env]: /reference/environment-variables#cube-store\n+[ref-connect-db-athena]: /connecting-to-the-database#notes-aws-athena\n+[ref-connect-db-redshift]: /connecting-to-the-database#notes-aws-redshift\n+[ref-connect-db-bigquery]: /connecting-to-the-database#notes-google-big-query\n+[ref-connect-db-mysql]: /connecting-to-the-database#notes-my-sql\n+[ref-connect-db-postgres]: /connecting-to-the-database#notes-aws-rds-postgres\n+[ref-connect-db-snowflake]: /connecting-to-the-database#notes-snowflake\n [ref-schema-timedimension]: /types-and-formats#dimensions-types-time\n [ref-preaggs]: /pre-aggregations\n [ref-preagg-sched-refresh]: /pre-aggregations#scheduled-refresh\n [ref-preagg-time-part]: /pre-aggregations#rollup-time-partitioning\n [ref-preagg-segment-part]: /pre-aggregations#rollup-segment-partitioning\n [ref-preaggs-refresh-key]: /pre-aggregations#refresh-key\n+[ref-prod-list-refresh]: /deployment/production-checklist#set-up-refresh-worker\n [ref-config-extdbtype]: /config#options-reference-external-db-type\n [ref-config-driverfactory]: /config#options-reference-driver-factory\n [ref-config-extdriverfactory]: /config#options-reference-external-driver-factory\n-[ref-production-checklist-refresh]:\n- /deployment/production-checklist#set-up-refresh-worker\ndiff --git a/docs/content/Caching/build-batching.png b/docs/content/Caching/build-batching.png\nnew file mode 100755\nindex 0000000..d1e28b3\nBinary files /dev/null and b/docs/content/Caching/build-batching.png differ\ndiff --git a/docs/content/Caching/build-export-bucket.png b/docs/content/Caching/build-export-bucket.png\nnew file mode 100755\nindex 0000000..7da2425\nBinary files /dev/null and b/docs/content/Caching/build-export-bucket.png differ\ndiff --git a/docs/content/Caching/build-regular.png b/docs/content/Caching/build-regular.png\nnew file mode 
100644\nindex 0000000..af4c3a2\nBinary files /dev/null and b/docs/content/Caching/build-regular.png differ\ndiff --git a/docs/content/Configuration/Connecting-to-the-Database.md b/docs/content/Configuration/Connecting-to-the-Database.md\nindex 321518f..a16ccc4 100644\n--- a/docs/content/Configuration/Connecting-to-the-Database.md\n+++ b/docs/content/Configuration/Connecting-to-the-Database.md\n@@ -49,20 +49,21 @@ CUBEJS_API_SECRET=secret\n The table below shows which environment variables are used for different\n databases:\n \n-| Database | Credentials |\n-| ------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n-| PostgreSQL, MySQL, AWS Redshift, Hive/SparkSQL, Oracle | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n-| MS SQL | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_DOMAIN` |\n-| ClickHouse | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_CLICKHOUSE_READONLY` |\n-| AWS Athena | `CUBEJS_AWS_KEY`, `CUBEJS_AWS_SECRET`, `CUBEJS_AWS_REGION`, `CUBEJS_AWS_S3_OUTPUT_LOCATION` |\n-| Google BigQuery | `CUBEJS_DB_BQ_PROJECT_ID`, `CUBEJS_DB_BQ_KEY_FILE or CUBEJS_DB_BQ_CREDENTIALS`, `CUBEJS_DB_BQ_LOCATION`, `CUBEJS_DB_BQ_EXPORT_BUCKET` |\n-| MongoDB | `CUBEJS_DB_HOST`, `CUBEJS_DB_NAME`, `CUBEJS_DB_PORT`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_SSL_CA`, `CUBEJS_DB_SSL_CERT`, `CUBEJS_DB_SSL_CIPHERS`, `CUBEJS_DB_SSL_PASSPHRASE` |\n-| Snowflake | `CUBEJS_DB_SNOWFLAKE_ACCOUNT`, `CUBEJS_DB_SNOWFLAKE_REGION`, `CUBEJS_DB_SNOWFLAKE_WAREHOUSE`, `CUBEJS_DB_SNOWFLAKE_ROLE`, `CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` |\n-| Presto | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_CATALOG`, `CUBEJS_DB_SCHEMA`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n-| Druid | `CUBEJS_DB_URL`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL` |\n-| SQLite | `CUBEJS_DB_NAME` |\n-| Databricks | `CUBEJS_DB_NAME`, `CUBEJS_DB_DATABRICKS_URL` |\n-| Elasticsearch | `CUBEJS_DB_URL`, `CUBEJS_DB_ELASTIC_QUERY_FORMAT`,`CUBEJS_DB_ELASTIC_OPENDISTRO` ,`CUBEJS_DB_ELASTIC_APIKEY_ID`,`CUBEJS_DB_ELASTIC_APIKEY_KEY` |\n+| Database | Credentials |\n+| ---------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n+| PostgreSQL, MySQL, Hive/SparkSQL, Oracle | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n+| AWS Redshift | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, |\n+| MS SQL | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_DOMAIN` |\n+| ClickHouse | `CUBEJS_DB_HOST`, 
`CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_CLICKHOUSE_READONLY` |\n+| AWS Athena | `CUBEJS_AWS_KEY`, `CUBEJS_AWS_SECRET`, `CUBEJS_AWS_REGION`, `CUBEJS_AWS_S3_OUTPUT_LOCATION` |\n+| Google BigQuery | `CUBEJS_DB_BQ_PROJECT_ID`, `CUBEJS_DB_BQ_KEY_FILE or CUBEJS_DB_BQ_CREDENTIALS`, `CUBEJS_DB_BQ_LOCATION`, |\n+| MongoDB | `CUBEJS_DB_HOST`, `CUBEJS_DB_NAME`, `CUBEJS_DB_PORT`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_SSL_CA`, `CUBEJS_DB_SSL_CERT`, `CUBEJS_DB_SSL_CIPHERS`, `CUBEJS_DB_SSL_PASSPHRASE` |\n+| Snowflake | `CUBEJS_DB_SNOWFLAKE_ACCOUNT`, `CUBEJS_DB_SNOWFLAKE_REGION`, `CUBEJS_DB_SNOWFLAKE_WAREHOUSE`, `CUBEJS_DB_SNOWFLAKE_ROLE`, `CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` |\n+| Presto | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_CATALOG`, `CUBEJS_DB_SCHEMA`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n+| Druid | `CUBEJS_DB_URL`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL` |\n+| SQLite | `CUBEJS_DB_NAME` |\n+| Databricks | `CUBEJS_DB_NAME`, `CUBEJS_DB_DATABRICKS_URL` |\n+| Elasticsearch | `CUBEJS_DB_URL`, `CUBEJS_DB_ELASTIC_QUERY_FORMAT`,`CUBEJS_DB_ELASTIC_OPENDISTRO` ,`CUBEJS_DB_ELASTIC_APIKEY_ID`,`CUBEJS_DB_ELASTIC_APIKEY_KEY` |\n \n ## Multiple Databases\n \n@@ -195,18 +196,25 @@ You can learn more about acquiring Google BigQuery credentials\n [here][link-bigquery-getting-started] and [here][link-bigquery-credentials].\n \n You can set the dataset location using the `CUBEJS_DB_BQ_LOCATION` environment\n-variable.\n+variable. All supported regions [can be found\n+here][link-bigquery-regional-locations].\n \n ```dotenv\n CUBEJS_DB_BQ_LOCATION=us-central1\n ```\n \n-You can find more supported regions [here][link-bigquery-regional-locations].\n+#### Configuring an export bucket\n \n-If your pre-aggregations dataset is too big to fit in memory, we **strongly**\n-recommend configuring `CUBEJS_DB_BQ_EXPORT_BUCKET`. This will allow Cube.js to\n-materialize results on an \"export\" bucket which are then loaded into BigQuery,\n-providing better performance.\n+<!-- prettier-ignore-start -->\n+[[warning |]]\n+| BigQuery only supports using Google Cloud Storage for export buckets.\n+<!-- prettier-ignore-end -->\n+\n+##### Google Cloud Storage\n+\n+For [improved pre-aggregation performance with large\n+datasets][ref-caching-large-preaggs], enable the export bucket functionality by\n+configuring Cube.js with the following environment variables:\n \n <!-- prettier-ignore-start -->\n [[info |]]\n@@ -216,7 +224,8 @@ providing better performance.\n <!-- prettier-ignore-end -->\n \n ```dotenv\n-CUBEJS_DB_BQ_EXPORT_BUCKET=export_data_58148478376\n+CUBEJS_DB_EXPORT_BUCKET=export_data_58148478376\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=gcp\n ```\n \n ### MSSQL\n@@ -279,6 +288,73 @@ To connect to a Elasticsearch database, use `CUBEJS_DB_URL` with the username\n and password embedded in the URL, if required. 
If you're not using Elastic\n Cloud, you **must** specify `CUBEJS_DB_ELASTIC_QUERY_FORMAT`.\n \n+### AWS Redshift\n+\n+#### Configuring an export bucket\n+\n+<!-- prettier-ignore-start -->\n+[[warning |]]\n+| AWS Redshift only supports using AWS S3 for export buckets.\n+<!-- prettier-ignore-end -->\n+\n+##### AWS S3\n+\n+For [improved pre-aggregation performance with large\n+datasets][ref-caching-large-preaggs], enable the export bucket functionality by\n+configuring Cube.js with the following environment variables:\n+\n+<!-- prettier-ignore-start -->\n+[[info |]]\n+| Ensure the AWS credentials are correctly configured in IAM to allow reads and\n+| writes to the export bucket.\n+<!-- prettier-ignore-end -->\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=s3\n+CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3\n+CUBEJS_DB_EXPORT_BUCKET_AWS_KEY=<AWS_KEY>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>\n+```\n+\n+### Snowflake\n+\n+#### Configuring an export bucket\n+\n+Snowflake supports using both AWS S3 and Google Cloud Storage for export bucket\n+functionality.\n+\n+##### AWS S3\n+\n+<!-- prettier-ignore-start -->\n+[[info |]]\n+| Ensure the AWS credentials are correctly configured in IAM to allow reads and\n+| writes to the export bucket.\n+<!-- prettier-ignore-end -->\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=s3\n+CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3\n+CUBEJS_DB_EXPORT_BUCKET_AWS_KEY=<AWS_KEY>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>\n+```\n+\n+##### Google Cloud Storage\n+\n+Before configuring Cube.js, an [integration must be created and configured in\n+Snowflake][link-snowflake-gcs-integration]. Take note of the integration name\n+(`gcs_int` from the example link) as you'll need it to configure Cube.js.\n+\n+Once the Snowflake integration is set up, configure Cube.js using the following:\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET=snowflake-export-bucket\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=gcp\n+CUBEJS_DB_EXPORT_GCS_CREDENTIALS=<BASE64_ENCODED_SERVICE_CREDENTIALS_JSON\n+CUBEJS_DB_EXPORT_INTEGRATION=gcs_int\n+```\n+\n [link-java-guide]:\n https://github.com/cube-js/cube.js/blob/master/packages/cubejs-jdbc-driver/README.md#java-installation\n [link-cubejs-driver-guide]:\n@@ -300,8 +376,11 @@ Cloud, you **must** specify `CUBEJS_DB_ELASTIC_QUERY_FORMAT`.\n https://console.cloud.google.com/apis/credentials/serviceaccountkey\n [link-heroku-postgres-issue]:\n https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl\n+[link-snowflake-gcs-integration]:\n+ https://docs.snowflake.com/en/user-guide/data-load-gcs-config.html\n+[link-bigquery-regional-locations]:\n+ https://cloud.google.com/bigquery/docs/locations#regional-locations\n [ref-cubejs-cli]: /using-the-cubejs-cli\n [ref-enabling-ssl]: #enabling-ssl\n [ref-env-var]: /reference/environment-variables#database-connection\n-[link-bigquery-regional-locations]:\n- https://cloud.google.com/bigquery/docs/locations#regional-locations\n+[ref-caching-large-preaggs]: /using-pre-aggregations#large-pre-aggregations\ndiff --git a/docs/content/Configuration/Environment-Variables-Reference.md b/docs/content/Configuration/Environment-Variables-Reference.md\nindex 692d2c7..6888697 100644\n--- a/docs/content/Configuration/Environment-Variables-Reference.md\n+++ b/docs/content/Configuration/Environment-Variables-Reference.md\n@@ -124,6 +124,18 @@ databases [in this guide][link-connecting-to-db].\n | 
`CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` | Snowflake | The password for the private RSA key. Only required for encrypted keys | A valid password for the encrypted private RSA key |\n | `CUBEJS_DB_DATABRICKS_URL` | Databricks | The URL for a JDBC connection | A valid JDBC URL |\n \n+## Export Bucket\n+\n+| Environment variable | Description | Possible Values |\n+| ------------------------------------ | -------------------------------------------------------------------------------------------- | ---------------------------------------------------------------- |\n+| `CUBEJS_DB_EXPORT_BUCKET` | The name of a bucket in cloud storage | `exports-20210505` |\n+| `CUBEJS_DB_EXPORT_BUCKET_TYPE` | The cloud provider where the bucket is hosted | `gcs`, `s3` |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_KEY` | The AWS Access Key ID to use for the export bucket | A valid AWS Access Key ID |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET` | The AWS Secret Access Key to use for the export bucket | A valid AWS Secret Access Key |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_REGION` | The AWS region of the export bucket | [A valid AWS region][link-aws-regions] |\n+| `CUBEJS_DB_EXPORT_GCS_CREDENTIALS` | A Base64 encoded JSON key file for connecting to Google Cloud | A valid Google Cloud JSON key file encoded as a Base64 string |\n+| `CUBEJS_DB_EXPORT_INTEGRATION` | The name of the integration used in the database. Only required when using Snowflake and GCS | A valid string matching the name of the integration in Snowflake |\n+\n ## Cube Store\n \n | Environment variable | Description | Possible Values |\n", "diff --git a/server/Dockerfile b/server/Dockerfile\nindex 2f203bb..a84c31e 100755\n--- a/server/Dockerfile\n+++ b/server/Dockerfile\n@@ -9,9 +9,11 @@ ENV TZ utc\n WORKDIR /src\n \n COPY package.json /src\n+COPY package-lock.json /src\n+COPY tsconfig.json /src\n RUN npm install --production --no-optional\n \n COPY public /src/public\n COPY dist /src\n \n-CMD [ \"node\", \"./server/index.js\" ]\n+CMD [ \"node\", \"-r\", \"tsconfig-paths/register\", \"./server/index.js\" ]\ndiff --git a/server/package-lock.json b/server/package-lock.json\nindex 6cacfa2..236f1bb 100644\n--- a/server/package-lock.json\n+++ b/server/package-lock.json\n@@ -2164,8 +2164,7 @@\n \"@types/json5\": {\n \"version\": \"0.0.29\",\n \"resolved\": \"https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz\",\n- \"integrity\": \"sha1-7ihweulOEdK4J7y+UnC86n8+ce4=\",\n- \"dev\": true\n+ \"integrity\": \"sha1-7ihweulOEdK4J7y+UnC86n8+ce4=\"\n },\n \"@types/jsonwebtoken\": {\n \"version\": \"8.3.5\",\n@@ -9246,8 +9245,7 @@\n \"strip-bom\": {\n \"version\": \"3.0.0\",\n \"resolved\": \"https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz\",\n- \"integrity\": \"sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=\",\n- \"dev\": true\n+ \"integrity\": \"sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=\"\n },\n \"strip-final-newline\": {\n \"version\": \"2.0.0\",\n@@ -9524,7 +9522,6 @@\n \"version\": \"3.9.0\",\n \"resolved\": \"https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz\",\n \"integrity\": \"sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==\",\n- \"dev\": true,\n \"requires\": {\n \"@types/json5\": \"^0.0.29\",\n \"json5\": \"^1.0.1\",\n@@ -9536,7 +9533,6 @@\n \"version\": \"1.0.1\",\n \"resolved\": \"https://registry.npmjs.org/json5/-/json5-1.0.1.tgz\",\n \"integrity\": \"sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==\",\n- \"dev\": true,\n \"requires\": {\n \"minimist\": 
\"^1.2.0\"\n }\n@@ -9544,8 +9540,7 @@\n \"minimist\": {\n \"version\": \"1.2.5\",\n \"resolved\": \"https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz\",\n- \"integrity\": \"sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==\",\n- \"dev\": true\n+ \"integrity\": \"sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==\"\n }\n }\n },\ndiff --git a/server/package.json b/server/package.json\nindex 35426e9..896e9b3 100644\n--- a/server/package.json\n+++ b/server/package.json\n@@ -41,6 +41,7 @@\n \"pino-cloudwatch\": \"0.7.0\",\n \"pino-multi-stream\": \"4.2.0\",\n \"reflect-metadata\": \"0.1.13\",\n+ \"tsconfig-paths\": \"3.9.0\",\n \"typeorm\": \"0.2.37\"\n },\n \"devDependencies\": {\n@@ -69,7 +70,6 @@\n \"pino-pretty\": \"3.6.1\",\n \"ts-jest\": \"27.0.7\",\n \"ts-node-dev\": \"1.1.8\",\n- \"tsconfig-paths\": \"3.9.0\",\n \"typescript\": \"4.3.5\"\n },\n \"jest-junit\": {\n", "diff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\nindex da05e13..9231df3 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n@@ -154,6 +154,9 @@ public final class EventAppliers implements EventApplier {\n register(\n ProcessInstanceIntent.SEQUENCE_FLOW_TAKEN,\n new ProcessInstanceSequenceFlowTakenApplier(elementInstanceState, processState));\n+ register(\n+ ProcessInstanceIntent.ELEMENT_MIGRATED,\n+ new ProcessInstanceElementMigratedApplier(elementInstanceState));\n }\n \n private void registerProcessInstanceCreationAppliers(final MutableProcessingState state) {\ndiff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\nindex e5a0f3a..d38358f 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n@@ -24,5 +24,16 @@ final class ProcessInstanceElementMigratedApplier\n }\n \n @Override\n- public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {}\n+ public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {\n+ elementInstanceState.updateInstance(\n+ elementInstanceKey,\n+ elementInstance ->\n+ elementInstance\n+ .getValue()\n+ .setProcessDefinitionKey(value.getProcessDefinitionKey())\n+ .setBpmnProcessId(value.getBpmnProcessId())\n+ .setVersion(value.getVersion())\n+ .setElementId(value.getElementId())\n+ .setFlowScopeKey(value.getFlowScopeKey()));\n+ }\n }\n"]
5
["834a9e3cfd6439f295072e5808a02cf2a35ba083", "4f86f2570b274c45605cc59d9adb38f7ed30cd17", "81f37be838d5e3af738908b1bcbf59fea2b45989", "a827777f41e90b6332c191d05bae8db525de6f38", "39d5d1cfe8d2210305df2c8fab4a4ae430732cf7"]
["test", "refactor", "docs", "build", "feat"]
wire up fixed null encoding,path correction Signed-off-by: Pranav C <[email protected]>,rename step,add title to badge icon,use new freespace config for disk space recory test
["diff --git a/read_buffer/src/row_group.rs b/read_buffer/src/row_group.rs\nindex 91c9fb5..ca77f3c 100644\n--- a/read_buffer/src/row_group.rs\n+++ b/read_buffer/src/row_group.rs\n@@ -958,24 +958,15 @@ impl From<RecordBatch> for RowGroup {\n }\n Some(InfluxColumnType::Field(_)) => {\n let column_data = match arrow_column.data_type() {\n- arrow::datatypes::DataType::Int64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::Int64Array>()\n- .unwrap(),\n- ),\n- arrow::datatypes::DataType::Float64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::Float64Array>()\n- .unwrap(),\n- ),\n- arrow::datatypes::DataType::UInt64 => Column::from(\n- arrow_column\n- .as_any()\n- .downcast_ref::<arrow::array::UInt64Array>()\n- .unwrap(),\n- ),\n+ arrow::datatypes::DataType::Int64 => {\n+ Column::from(arrow::array::Int64Array::from(arrow_column.data()))\n+ }\n+ arrow::datatypes::DataType::Float64 => {\n+ Column::from(arrow::array::Float64Array::from(arrow_column.data()))\n+ }\n+ arrow::datatypes::DataType::UInt64 => {\n+ Column::from(arrow::array::UInt64Array::from(arrow_column.data()))\n+ }\n dt => unimplemented!(\n \"data type {:?} currently not supported for field columns\",\n dt\n", "diff --git a/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts b/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts\nindex 3afce9b..8425b00 100644\n--- a/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts\n+++ b/packages/nocodb-nest/src/modules/test/TestResetService/resetMetaSakilaSqliteProject.ts\n@@ -1,11 +1,9 @@\n import { promises as fs } from 'fs';\n import axios from 'axios';\n+import path from 'path'\n \n const sqliteFilePath = (parallelId: string) => {\n- const rootDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n- '',\n- );\n+ const rootDir = process.cwd()\n \n return `${rootDir}/test_sakila_${parallelId}.db`;\n };\n@@ -78,10 +76,10 @@ const deleteSqliteFileIfExists = async (parallelId: string) => {\n };\n \n const seedSakilaSqliteFile = async (parallelId: string) => {\n- const testsDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n- '/tests',\n- );\n+ const testsDir = path.join(\n+ process.cwd(),\n+ 'tests'\n+ );;\n \n await fs.copyFile(\n `${testsDir}/sqlite-sakila-db/sakila.db`,\ndiff --git a/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts b/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts\nindex 6bcd3f1..e4ed112 100644\n--- a/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts\n+++ b/packages/nocodb-nest/src/modules/test/TestResetService/resetMysqlSakilaProject.ts\n@@ -1,4 +1,5 @@\n import { promises as fs } from 'fs';\n+import path from 'path';\n import axios from 'axios';\n import { knex } from 'knex';\n import Audit from '../../../models/Audit';\n@@ -85,10 +86,7 @@ const resetSakilaMysql = async (\n parallelId: string,\n isEmptyProject: boolean,\n ) => {\n- const testsDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n- '/tests',\n- );\n+ const testsDir = path.join(process.cwd(), '/tests');\n \n try {\n await knex.raw(`DROP DATABASE test_sakila_${parallelId}`);\ndiff --git a/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts b/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts\nindex 1a042c3..73923ef 100644\n--- 
a/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts\n+++ b/packages/nocodb-nest/src/modules/test/TestResetService/resetPgSakilaProject.ts\n@@ -1,6 +1,7 @@\n import { promises as fs } from 'fs';\n import axios from 'axios';\n import { knex } from 'knex';\n+import path from 'path'\n import Audit from '../../../models/Audit';\n import type Project from '../../../models/Project';\n \n@@ -78,8 +79,8 @@ const isSakilaPgToBeReset = async (parallelId: string, project?: Project) => {\n };\n \n const resetSakilaPg = async (parallelId: string, isEmptyProject: boolean) => {\n- const testsDir = __dirname.replace(\n- '/src/modules/test/TestResetService',\n+ const testsDir = path.join(\n+ process.cwd(),\n '/tests',\n );\n \n", "diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex e81d897..5c3ee6b 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -45,7 +45,7 @@ jobs:\n - name: Install dependencies\n run: pnpm install\n \n- - name: Typecheck\n+ - name: Build (stub)\n run: pnpm build:stub\n \n - name: Typecheck\n", "diff --git a/kibbeh/src/modules/room/chat/RoomChatList.tsx b/kibbeh/src/modules/room/chat/RoomChatList.tsx\nindex a7418e6..805a9a4 100644\n--- a/kibbeh/src/modules/room/chat/RoomChatList.tsx\n+++ b/kibbeh/src/modules/room/chat/RoomChatList.tsx\n@@ -16,6 +16,11 @@ interface ChatListProps {\n users: RoomUser[];\n }\n \n+interface BadgeIconData {\n+ emoji: string,\n+ title: string\n+}\n+\n export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const { setData } = useContext(UserPreviewModalContext);\n const { messages, toggleFrozen } = useRoomChatStore();\n@@ -48,11 +53,14 @@ export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const getBadgeIcon = (m: Message) => {\n const user = users.find((u) => u.id === m.userId);\n const isSpeaker = room.creatorId === user?.id || user?.roomPermissions?.isSpeaker;\n- let emoji = null;\n+ let badgeIconData: BadgeIconData | null = null;\n if (isSpeaker) {\n- emoji = \"\ud83d\udce3\";\n+ badgeIconData = {\n+ emoji: \"\ud83d\udce3\",\n+ title: \"Speaker\"\n+ };\n }\n- return emoji && <Twemoji text={emoji} style={{ marginRight: \"1ch\" }}/>;\n+ return badgeIconData && <Twemoji text={badgeIconData.emoji} title={badgeIconData.title} style={{ marginRight: \"1ch\" }}/>;\n };\n \n return (\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\nindex 0854323..bfc7b7e 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n@@ -47,7 +47,8 @@ final class DiskSpaceRecoveryIT {\n .withZeebeData(volume)\n .withEnv(\"ZEEBE_BROKER_DATA_LOGSEGMENTSIZE\", \"1MB\")\n .withEnv(\"ZEEBE_BROKER_NETWORK_MAXMESSAGESIZE\", \"1MB\")\n- .withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.5\");\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"10MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"1MB\");\n \n private ZeebeClient client;\n \n@@ -127,7 +128,9 @@ final class DiskSpaceRecoveryIT {\n ContainerEngine.builder()\n .withDebugReceiverPort(SocketUtil.getNextAddress().getPort())\n .withContainer(\n- container.withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.0001\"))\n+ container\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", 
\"16MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"10MB\"))\n .build();\n \n @BeforeEach\n"]
5
["28b596b8834d1b51be3ac6a2ac30df28f37702d8", "974e033a3ca7484290a04201ee33856a25da0942", "34875bc0e59b43d9041903101c823d25ec194a21", "6e5098655e6d9bb13f6423abe780cdf6b50ff13a", "672cd2b9775fb6dac2d522cb3f4469db47c0556b"]
["refactor", "fix", "ci", "feat", "test"]
Remove hasmany and belongsto from context menu Signed-off-by: Pranav C <[email protected]>,add unit test for query API,temporary do no run "verify-ffmpeg.py" on Mac CI (#14986),allow users to share their playground session,removed files
["diff --git a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\nindex 5bc6f67..aaa297c 100644\n--- a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n@@ -261,37 +261,7 @@\n :size=\"size\"\n @input=\"loadTableData\"\n />\n- <!-- <v-pagination\n- v-if=\"count !== Infinity\"\n- style=\"max-width: 100%\"\n- v-model=\"page\"\n- :length=\"Math.ceil(count / size)\"\n- :total-visible=\"8\"\n- @input=\"loadTableData\"\n- color=\"primary lighten-2\"\n- ></v-pagination>\n- <div v-else class=\"mx-auto d-flex align-center mt-n1 \" style=\"max-width:250px\">\n- <span class=\"caption\" style=\"white-space: nowrap\"> Change page:</span>\n- <v-text-field\n- class=\"ml-1 caption\"\n- :full-width=\"false\"\n- outlined\n- dense\n- hide-details\n- v-model=\"page\"\n- @keydown.enter=\"loadTableData\"\n- type=\"number\"\n- >\n- <template #append>\n- <x-icon tooltip=\"Change page\" small icon.class=\"mt-1\" @click=\"loadTableData\">mdi-keyboard-return\n- </x-icon>\n- </template>\n- </v-text-field>\n- </div>-->\n </template>\n- <!-- <div v-else class=\"d-flex justify-center py-4\">-->\n- <!-- <v-alert type=\"info\" dense class=\"ma-1 flex-shrink-1\">Table is empty</v-alert>-->\n- <!-- </div>-->\n </div>\n \n <spreadsheet-nav-drawer\n@@ -414,9 +384,9 @@\n <span class=\"caption\">Delete Selected Rows</span>\n </v-list-item>\n </template>\n- <template v-if=\"meta.hasMany && meta.hasMany.length\">\n+ <!-- <template v-if=\"meta.hasMany && meta.hasMany.length\">\n <v-divider v-if=\"isEditable && !isLocked\" />\n- <span class=\"ml-3 grey--text \" style=\"font-size: 9px\">Has Many</span>\n+ <span class=\"ml-3 grey&#45;&#45;text \" style=\"font-size: 9px\">Has Many</span>\n \n <v-list-item v-for=\"(hm,i) in meta.hasMany\" :key=\"i\" @click=\"addNewRelationTabCtxMenu(hm,'hm')\">\n <span class=\"caption text-capitalize\">{{ hm._tn }}</span>\n@@ -425,12 +395,12 @@\n \n <template v-if=\"meta.belongsTo && meta.belongsTo.length\">\n <v-divider />\n- <span class=\"ml-3 grey--text \" style=\"font-size: 9px\">Belongs To</span>\n+ <span class=\"ml-3 grey&#45;&#45;text \" style=\"font-size: 9px\">Belongs To</span>\n \n <v-list-item v-for=\"(bt,i) in belongsTo\" :key=\"i\" @click=\"addNewRelationTabCtxMenu(bt,'bt')\">\n <span class=\"caption text-capitalize\">{{ bt._rtn }}</span>\n </v-list-item>\n- </template>\n+ </template>-->\n </v-list>\n </v-menu>\n <v-dialog\n", "diff --git a/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java b/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java\nindex 2d2d084..38261ad 100644\n--- a/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java\n+++ b/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java\n@@ -25,6 +25,7 @@ import java.util.HashMap;\n import java.util.List;\n import java.util.Map;\n import java.util.concurrent.CompletableFuture;\n+import java.util.concurrent.TimeUnit;\n import java.util.function.Consumer;\n \n public final class StubbedBrokerClient implements BrokerClient {\n@@ -67,7 +68,15 @@ public final class StubbedBrokerClient implements BrokerClient {\n @Override\n public <T> CompletableFuture<BrokerResponse<T>> sendRequestWithRetry(\n final BrokerRequest<T> request, final Duration requestTimeout) {\n- throw new UnsupportedOperationException(\"not implemented\");\n+ final 
CompletableFuture<BrokerResponse<T>> result = new CompletableFuture<>();\n+\n+ sendRequestWithRetry(\n+ request,\n+ (key, response) ->\n+ result.complete(new BrokerResponse<>(response, Protocol.decodePartitionId(key), key)),\n+ result::completeExceptionally);\n+\n+ return result.orTimeout(requestTimeout.toNanos(), TimeUnit.NANOSECONDS);\n }\n \n @Override\ndiff --git a/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryApiTest.java b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryApiTest.java\nnew file mode 100644\nindex 0000000..ec9ec80\n--- /dev/null\n+++ b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryApiTest.java\n@@ -0,0 +1,91 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.gateway.query;\n+\n+import static org.assertj.core.api.Assertions.assertThat;\n+\n+import io.camunda.zeebe.gateway.api.util.GatewayTest;\n+import io.camunda.zeebe.gateway.cmd.BrokerErrorException;\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerError;\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerErrorResponse;\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerResponse;\n+import io.camunda.zeebe.gateway.query.impl.QueryApiImpl;\n+import io.camunda.zeebe.protocol.Protocol;\n+import io.camunda.zeebe.protocol.record.ErrorCode;\n+import java.time.Duration;\n+import java.util.concurrent.CompletionStage;\n+import java.util.concurrent.ExecutionException;\n+import org.junit.Test;\n+import org.junit.runner.RunWith;\n+import org.junit.runners.Parameterized;\n+import org.junit.runners.Parameterized.Parameter;\n+import org.junit.runners.Parameterized.Parameters;\n+\n+@RunWith(Parameterized.class)\n+public final class QueryApiTest extends GatewayTest {\n+ @Parameter(0)\n+ public String name;\n+\n+ @Parameter(1)\n+ public Querier querier;\n+\n+ @Parameters(name = \"{index}: {0}\")\n+ public static Object[][] queries() {\n+ return new Object[][] {\n+ new Object[] {\"getBpmnProcessIdForProcess\", (Querier) QueryApi::getBpmnProcessIdFromProcess},\n+ new Object[] {\n+ \"getBpmnProcessIdForProcessInstance\",\n+ (Querier) QueryApi::getBpmnProcessIdFromProcessInstance\n+ },\n+ new Object[] {\"getBpmnProcessIdForProcessJob\", (Querier) QueryApi::getBpmnProcessIdFromJob},\n+ };\n+ }\n+\n+ @Test\n+ public void shouldGetBpmnProcessId() {\n+ // given\n+ final var key = Protocol.encodePartitionId(1, 1);\n+ final var api = new QueryApiImpl(brokerClient);\n+ final var timeout = Duration.ofSeconds(5);\n+ final var stub = new QueryStub(new BrokerResponse<>(\"myProcess\", 1, 1));\n+ stub.registerWith(brokerClient);\n+\n+ // when\n+ final var result = querier.query(api, key, timeout);\n+\n+ // then\n+ assertThat(result).succeedsWithin(timeout).isEqualTo(\"myProcess\");\n+ }\n+\n+ @Test\n+ public void shouldCompleteExceptionallyOnError() {\n+ // given\n+ final var key = Protocol.encodePartitionId(1, 1);\n+ final var api = new QueryApiImpl(brokerClient);\n+ final var timeout = Duration.ofSeconds(5);\n+ final var stub =\n+ new QueryStub(\n+ new BrokerErrorResponse<>(\n+ new BrokerError(ErrorCode.PARTITION_LEADER_MISMATCH, \"Leader mismatch\")));\n+ stub.registerWith(brokerClient);\n+\n+ // when\n+ final var 
result = querier.query(api, key, timeout);\n+\n+ // then\n+ assertThat(result)\n+ .failsWithin(timeout)\n+ .withThrowableOfType(ExecutionException.class)\n+ .havingRootCause()\n+ .isInstanceOf(BrokerErrorException.class);\n+ }\n+\n+ private interface Querier {\n+ CompletionStage<String> query(final QueryApi api, final long key, final Duration timeout);\n+ }\n+}\ndiff --git a/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryStub.java b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryStub.java\nnew file mode 100644\nindex 0000000..2f8334e\n--- /dev/null\n+++ b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryStub.java\n@@ -0,0 +1,31 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.gateway.query;\n+\n+import io.camunda.zeebe.gateway.api.util.StubbedBrokerClient;\n+import io.camunda.zeebe.gateway.api.util.StubbedBrokerClient.RequestStub;\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerResponse;\n+import io.camunda.zeebe.gateway.query.impl.BrokerExecuteQuery;\n+\n+final class QueryStub implements RequestStub<BrokerExecuteQuery, BrokerResponse<String>> {\n+ private final BrokerResponse<String> response;\n+\n+ public QueryStub(final BrokerResponse<String> response) {\n+ this.response = response;\n+ }\n+\n+ @Override\n+ public void registerWith(final StubbedBrokerClient gateway) {\n+ gateway.registerHandler(BrokerExecuteQuery.class, this);\n+ }\n+\n+ @Override\n+ public BrokerResponse<String> handle(final BrokerExecuteQuery request) throws Exception {\n+ return response;\n+ }\n+}\n", "diff --git a/vsts.yml b/vsts.yml\nindex 6cb0eb3..a058238 100644\n--- a/vsts.yml\n+++ b/vsts.yml\n@@ -86,13 +86,13 @@ jobs:\n killall Electron\n fi\n displayName: Make sure Electron isn't running from previous tests\n-\n- - bash: |\n- cd src\n- python electron/script/verify-ffmpeg.py --source-root \"$PWD\" --build-dir out/Default --ffmpeg-path out/ffmpeg\n- displayName: Verify non proprietary ffmpeg\n- condition: and(succeeded(), eq(variables['RUN_TESTS'], '1'))\n- timeoutInMinutes: 5\n+# FIXME(alexeykuzmin)\n+# - bash: |\n+# cd src\n+# python electron/script/verify-ffmpeg.py --source-root \"$PWD\" --build-dir out/Default --ffmpeg-path out/ffmpeg\n+# displayName: Verify non proprietary ffmpeg\n+# condition: and(succeeded(), eq(variables['RUN_TESTS'], '1'))\n+# timeoutInMinutes: 5\n \n - bash: |\n cd src\n", "diff --git a/playground/docker-compose.yml b/playground/docker-compose.yml\nnew file mode 100644\nindex 0000000..b8ac6aa\n--- /dev/null\n+++ b/playground/docker-compose.yml\n@@ -0,0 +1,18 @@\n+version: '3.3'\n+\n+services:\n+ db:\n+ container_name: panda-mysql\n+ image: mariadb:10.7.1-focal\n+ restart: always\n+ ports:\n+ - 3310:3306\n+ environment:\n+ MARIADB_ROOT_PASSWORD: root\n+ MARIADB_DATABASE: panda\n+ volumes:\n+ - panda-mysql:/var/lib/mysql\n+\n+volumes:\n+ panda-mysql:\n+ driver: local\ndiff --git a/playground/package.json b/playground/package.json\nindex eab6f62..0feccbb 100644\n--- a/playground/package.json\n+++ b/playground/package.json\n@@ -9,6 +9,9 @@\n \"start\": \"next start\",\n \"lint\": \"next lint\",\n \"dev\": \"next dev\",\n+ \"db:start\": \"docker-compose up -d\",\n+ \"db:stop\": 
\"docker-compose down\",\n+ \"db:push\": \"prisma db push --skip-generate\",\n \"db:generate\": \"prisma generate\",\n \"db:reset\": \"prisma migrate reset\",\n \"db:studio\": \"prisma studio\"\ndiff --git a/playground/prisma/dev.db b/playground/prisma/dev.db\ndeleted file mode 100644\nindex aa8281f..0000000\nBinary files a/playground/prisma/dev.db and /dev/null differ\ndiff --git a/playground/prisma/migrations/20230204163131_init/migration.sql b/playground/prisma/migrations/20230204163131_init/migration.sql\ndeleted file mode 100644\nindex b3c34f7..0000000\n--- a/playground/prisma/migrations/20230204163131_init/migration.sql\n+++ /dev/null\n@@ -1,8 +0,0 @@\n--- CreateTable\n-CREATE TABLE \"Session\" (\n- \"id\" TEXT NOT NULL PRIMARY KEY,\n- \"code\" TEXT NOT NULL,\n- \"config\" TEXT NOT NULL,\n- \"view\" TEXT NOT NULL DEFAULT 'code',\n- \"createdAt\" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP\n-);\ndiff --git a/playground/prisma/migrations/20230208183556_/migration.sql b/playground/prisma/migrations/20230208183556_/migration.sql\ndeleted file mode 100644\nindex 619fd84..0000000\n--- a/playground/prisma/migrations/20230208183556_/migration.sql\n+++ /dev/null\n@@ -1,20 +0,0 @@\n-/*\n- Warnings:\n-\n- - You are about to drop the column `config` on the `Session` table. All the data in the column will be lost.\n-\n-*/\n--- RedefineTables\n-PRAGMA foreign_keys=OFF;\n-CREATE TABLE \"new_Session\" (\n- \"id\" TEXT NOT NULL PRIMARY KEY,\n- \"code\" TEXT NOT NULL,\n- \"theme\" TEXT NOT NULL DEFAULT '',\n- \"view\" TEXT NOT NULL DEFAULT 'code',\n- \"createdAt\" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP\n-);\n-INSERT INTO \"new_Session\" (\"code\", \"createdAt\", \"id\", \"view\") SELECT \"code\", \"createdAt\", \"id\", \"view\" FROM \"Session\";\n-DROP TABLE \"Session\";\n-ALTER TABLE \"new_Session\" RENAME TO \"Session\";\n-PRAGMA foreign_key_check;\n-PRAGMA foreign_keys=ON;\ndiff --git a/playground/prisma/migrations/20230529181831_init/migration.sql b/playground/prisma/migrations/20230529181831_init/migration.sql\nnew file mode 100644\nindex 0000000..ffe5546\n--- /dev/null\n+++ b/playground/prisma/migrations/20230529181831_init/migration.sql\n@@ -0,0 +1,9 @@\n+-- CreateTable\n+CREATE TABLE `Session` (\n+ `id` VARCHAR(191) NOT NULL,\n+ `code` TEXT NOT NULL,\n+ `theme` TEXT NOT NULL,\n+ `createdAt` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),\n+\n+ PRIMARY KEY (`id`)\n+) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;\ndiff --git a/playground/prisma/migrations/migration_lock.toml b/playground/prisma/migrations/migration_lock.toml\nindex e5e5c47..e5a788a 100644\n--- a/playground/prisma/migrations/migration_lock.toml\n+++ b/playground/prisma/migrations/migration_lock.toml\n@@ -1,3 +1,3 @@\n # Please do not edit this file manually\n # It should be added in your version-control system (i.e. 
Git)\n-provider = \"sqlite\"\n\\ No newline at end of file\n+provider = \"mysql\"\n\\ No newline at end of file\ndiff --git a/playground/prisma/schema.prisma b/playground/prisma/schema.prisma\nindex e84678a..9e1281e 100644\n--- a/playground/prisma/schema.prisma\n+++ b/playground/prisma/schema.prisma\n@@ -2,16 +2,14 @@ generator client {\n provider = \"prisma-client-js\"\n }\n \n-// Using SQLite for local development\n datasource db {\n- provider = \"sqlite\"\n- url = \"file:dev.db\"\n+ provider = \"mysql\"\n+ url = env(\"DATABASE_URL\")\n }\n \n model Session {\n- id String @id\n- code String\n- theme String @default(\"\")\n- view String @default(\"code\")\n+ id String @id @default(cuid())\n+ code String @db.Text\n+ theme String @db.Text\n createdAt DateTime @default(now())\n }\ndiff --git a/playground/src/app/[id]/page.tsx b/playground/src/app/[id]/page.tsx\nindex 40c21f0..a88d2b9 100644\n--- a/playground/src/app/[id]/page.tsx\n+++ b/playground/src/app/[id]/page.tsx\n@@ -6,9 +6,9 @@ const Page = async (props: any) => {\n params: { id },\n } = props\n \n- const initialState = await prisma?.session.findFirst({\n+ const initialState = await prisma.session.findFirst({\n where: { id },\n- select: { code: true, theme: true, view: true },\n+ select: { code: true, theme: true },\n })\n \n return <Playground intialState={initialState} />\ndiff --git a/playground/src/components/Editor.tsx b/playground/src/components/Editor.tsx\nindex 8263dba..e82469a 100644\n--- a/playground/src/components/Editor.tsx\n+++ b/playground/src/components/Editor.tsx\n@@ -123,10 +123,7 @@ export const Editor = (props: EditorProps) => {\n \n return (\n <Flex flex=\"1\" direction=\"column\" align=\"flex-start\">\n- <Tabs\n- defaultValue={value.view}\n- className={css({ flex: '1', width: 'full', display: 'flex', flexDirection: 'column' })}\n- >\n+ <Tabs defaultValue=\"code\" className={css({ flex: '1', width: 'full', display: 'flex', flexDirection: 'column' })}>\n <TabList\n className={css({\n px: '6',\ndiff --git a/playground/src/components/usePlayground.ts b/playground/src/components/usePlayground.ts\nindex 74b6069..a959fca 100644\n--- a/playground/src/components/usePlayground.ts\n+++ b/playground/src/components/usePlayground.ts\n@@ -4,7 +4,6 @@ import { Layout } from './LayoutControl'\n export type State = {\n code: string\n theme: string\n- view: string\n }\n \n export type UsePlayGroundProps = {\n@@ -51,7 +50,7 @@ export const App = () => {\n body: JSON.stringify(state),\n })\n .then((response) => response.json())\n- .then((data) => {\n+ .then(({ data }) => {\n history.pushState({ id: data.id }, '', data.id)\n setIsPristine(true)\n })\ndiff --git a/playground/src/pages/api/share.ts b/playground/src/pages/api/share.ts\nindex 23f8b9e..e6f3f26 100644\n--- a/playground/src/pages/api/share.ts\n+++ b/playground/src/pages/api/share.ts\n@@ -7,17 +7,16 @@ import { prisma } from '../../client/prisma'\n const schema = z.object({\n code: z.string(),\n theme: z.string(),\n- view: z.enum(['code', 'config']).optional(),\n })\n \n const handler = async (req: NextApiRequest, res: NextApiResponse) =>\n match(req)\n .with({ method: 'POST' }, async () => {\n try {\n- const { code, theme } = schema.parse(req.body)\n+ const data = schema.parse(req.body)\n const id = nanoid(10)\n- await prisma.session.create({ data: { id, code, theme } })\n- return res.status(200).json({ id })\n+ const session = await prisma.session.create({ data: { id, ...data }, select: { id: true } })\n+ return res.status(200).json({ success: true, data: session })\n } 
catch (e) {\n console.log(e)\n return res.status(500).json({ success: false })\n", "diff --git a/packages/tui/src/widgets/button.rs b/packages/tui/src/widgets/button.rs\nindex f3ebc79..845a60c 100644\n--- a/packages/tui/src/widgets/button.rs\n+++ b/packages/tui/src/widgets/button.rs\n@@ -32,7 +32,6 @@ pub(crate) fn Button<'a>(cx: Scope<'a, ButtonProps>) -> Element<'a> {\n callback.call(FormData {\n value: text.to_string(),\n values: HashMap::new(),\n- files: None,\n });\n }\n state.set(new_state);\ndiff --git a/packages/tui/src/widgets/checkbox.rs b/packages/tui/src/widgets/checkbox.rs\nindex 4831172..90c7212 100644\n--- a/packages/tui/src/widgets/checkbox.rs\n+++ b/packages/tui/src/widgets/checkbox.rs\n@@ -56,7 +56,6 @@ pub(crate) fn CheckBox<'a>(cx: Scope<'a, CheckBoxProps>) -> Element<'a> {\n \"on\".to_string()\n },\n values: HashMap::new(),\n- files: None,\n });\n }\n state.set(new_state);\ndiff --git a/packages/tui/src/widgets/number.rs b/packages/tui/src/widgets/number.rs\nindex 05cb2d6..93f9edd 100644\n--- a/packages/tui/src/widgets/number.rs\n+++ b/packages/tui/src/widgets/number.rs\n@@ -84,7 +84,6 @@ pub(crate) fn NumbericInput<'a>(cx: Scope<'a, NumbericInputProps>) -> Element<'a\n input_handler.call(FormData {\n value: text,\n values: HashMap::new(),\n- files: None,\n });\n }\n };\ndiff --git a/packages/tui/src/widgets/password.rs b/packages/tui/src/widgets/password.rs\nindex 7f8455d..d7e978f 100644\n--- a/packages/tui/src/widgets/password.rs\n+++ b/packages/tui/src/widgets/password.rs\n@@ -99,7 +99,6 @@ pub(crate) fn Password<'a>(cx: Scope<'a, PasswordProps>) -> Element<'a> {\n input_handler.call(FormData{\n value: text.clone(),\n values: HashMap::new(),\n- files: None\n });\n }\n \ndiff --git a/packages/tui/src/widgets/slider.rs b/packages/tui/src/widgets/slider.rs\nindex 43f0ac7..257c765 100644\n--- a/packages/tui/src/widgets/slider.rs\n+++ b/packages/tui/src/widgets/slider.rs\n@@ -58,7 +58,6 @@ pub(crate) fn Slider<'a>(cx: Scope<'a, SliderProps>) -> Element<'a> {\n oninput.call(FormData {\n value,\n values: HashMap::new(),\n- files: None,\n });\n }\n };\ndiff --git a/packages/tui/src/widgets/textbox.rs b/packages/tui/src/widgets/textbox.rs\nindex 8628fca..ce0ffcc 100644\n--- a/packages/tui/src/widgets/textbox.rs\n+++ b/packages/tui/src/widgets/textbox.rs\n@@ -95,7 +95,6 @@ pub(crate) fn TextBox<'a>(cx: Scope<'a, TextBoxProps>) -> Element<'a> {\n input_handler.call(FormData{\n value: text.clone(),\n values: HashMap::new(),\n- files: None\n });\n }\n \ndiff --git a/packages/web/src/dom.rs b/packages/web/src/dom.rs\nindex 7fa3d20..5037c4d 100644\n--- a/packages/web/src/dom.rs\n+++ b/packages/web/src/dom.rs\n@@ -331,11 +331,7 @@ fn read_input_to_data(target: Element) -> Rc<FormData> {\n }\n }\n \n- Rc::new(FormData {\n- value,\n- values,\n- files: None,\n- })\n+ Rc::new(FormData { value, values })\n }\n \n fn walk_event_for_id(event: &web_sys::Event) -> Option<(ElementId, web_sys::Element)> {\n"]
5
["7dbbb64c45506ef634180638db800b6d9535523d", "bed86aeae8dad2dd6371635cd24bf8ef3db80361", "9187415f5ee35d2e88dd834e413fc16bf19c5db1", "9c2c7ea1d4935d30e014ca807a4f9cb1665b1e41", "a81bbb83d64867f08c4d1be10919ef6806a1bf51"]
["refactor", "test", "ci", "feat", "fix"]
build updates,template properties,add classname and style props for Playground,use new freespace config for disk space recory test,permission check
["diff --git a/demo/vanilla_new/css/404.min.css b/demo/vanilla_new/css/404.min.css\nindex a3485b4..e69de29 100644\n--- a/demo/vanilla_new/css/404.min.css\n+++ b/demo/vanilla_new/css/404.min.css\n@@ -1 +0,0 @@\n-@import url(https://fonts.googleapis.com/css?family=Share+Tech+Mono%7CSpace+Mono);a,abbr,acronym,address,applet,article,aside,audio,b,big,blockquote,body,canvas,caption,center,cite,code,dd,del,details,dfn,div,dl,dt,em,embed,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,header,hgroup,html,i,iframe,img,ins,kbd,label,legend,li,mark,menu,nav,object,ol,output,p,pre,q,ruby,s,samp,section,small,span,strike,strong,sub,summary,sup,table,tbody,td,tfoot,th,thead,time,tr,tt,u,ul,var,video{margin:0;padding:0;border:0;font-size:100%;font:inherit;vertical-align:baseline}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section{display:block}body{line-height:1}ol,ul{list-style:none}blockquote,q{quotes:none}blockquote:after,blockquote:before,q:after,q:before{content:\"\";content:none}table{border-collapse:collapse;border-spacing:0}body{padding:0;margin:0;font-size:18px}.container{min-height:100vh;position:relative;padding:240px 0;box-sizing:border-box}.overlay{position:absolute;top:0;left:0;width:100%;height:100%;overflow:hidden}.content{position:absolute;top:50%;left:50%;width:100%;transform:translate(-50%,-50%)}.message{text-align:center;color:#000}.message-heading{font-family:\"Share Tech Mono\";font-weight:900;text-transform:uppercase;letter-spacing:.7em;font-size:2rem;padding:0 0 0 1.4em}.message-description{font-family:\"Space Mono\";line-height:42px;font-size:15px;letter-spacing:.15rem;padding:0 20px;max-width:600px;margin:auto}.links{max-width:600px;margin:40px auto 0;text-align:center}.links a{width:170px;display:inline-block;padding:15px 0;margin:0 15px;border:1px solid #000;color:#000;text-decoration:none;font-family:\"Space Mono\";text-transform:uppercase;font-size:11px;letter-spacing:.1rem;position:relative}.links a:before{content:\"\";height:42px;background:#000;position:absolute;top:0;right:0;width:0;transition:all .3s}.links a:after{transition:all .3s;z-index:999;position:relative;content:\"back to hompage\"}.links a:hover:before{width:170px}.links a:hover:after{color:#fff}.links a:nth-child(2){background:#fff;color:#000}.links a:nth-child(2):before{background:#212121;left:0}.links a:nth-child(2):after{content:\"report error\"}.links a:nth-child(2):hover:after{color:#fff}.social{position:absolute;bottom:15px;left:15px}.social-list{margin:0;padding:0;list-style-type:none}.social-list li{display:inline-block;margin:5px 10px}.social-list li a{color:#000}@media (max-width:480px){.message-heading{font-size:1rem;margin-bottom:30px}.message-description{font-size:.7rem;line-height:2rem}.links a{margin:10px;width:280px}.social{left:50%;margin-left:-55px}}\ndiff --git a/demo/vanilla_new/css/main.min.css b/demo/vanilla_new/css/main.min.css\nindex 043eb4f..e69de29 100644\n--- a/demo/vanilla_new/css/main.min.css\n+++ b/demo/vanilla_new/css/main.min.css\n@@ -1 +0,0 @@\n-html{height:100%;overflow:hidden}body{line-height:1;height:100%;overflow:hidden;background:#000}#floating-container{right:20px;top:20px;position:fixed;z-index:4000}\ndiff --git a/demo/vanilla_new/js/404.min.js b/demo/vanilla_new/js/404.min.js\nindex 3642106..e69de29 100644\n--- a/demo/vanilla_new/js/404.min.js\n+++ b/demo/vanilla_new/js/404.min.js\n@@ -1 +0,0 @@\n-tsParticles.loadJSON(\"tsparticles\",\"/configs/404.json\");\ndiff --git a/website/css/404.min.css b/website/css/404.min.css\nindex 
a3485b4..e69de29 100644\n--- a/website/css/404.min.css\n+++ b/website/css/404.min.css\n@@ -1 +0,0 @@\n-@import url(https://fonts.googleapis.com/css?family=Share+Tech+Mono%7CSpace+Mono);a,abbr,acronym,address,applet,article,aside,audio,b,big,blockquote,body,canvas,caption,center,cite,code,dd,del,details,dfn,div,dl,dt,em,embed,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,header,hgroup,html,i,iframe,img,ins,kbd,label,legend,li,mark,menu,nav,object,ol,output,p,pre,q,ruby,s,samp,section,small,span,strike,strong,sub,summary,sup,table,tbody,td,tfoot,th,thead,time,tr,tt,u,ul,var,video{margin:0;padding:0;border:0;font-size:100%;font:inherit;vertical-align:baseline}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section{display:block}body{line-height:1}ol,ul{list-style:none}blockquote,q{quotes:none}blockquote:after,blockquote:before,q:after,q:before{content:\"\";content:none}table{border-collapse:collapse;border-spacing:0}body{padding:0;margin:0;font-size:18px}.container{min-height:100vh;position:relative;padding:240px 0;box-sizing:border-box}.overlay{position:absolute;top:0;left:0;width:100%;height:100%;overflow:hidden}.content{position:absolute;top:50%;left:50%;width:100%;transform:translate(-50%,-50%)}.message{text-align:center;color:#000}.message-heading{font-family:\"Share Tech Mono\";font-weight:900;text-transform:uppercase;letter-spacing:.7em;font-size:2rem;padding:0 0 0 1.4em}.message-description{font-family:\"Space Mono\";line-height:42px;font-size:15px;letter-spacing:.15rem;padding:0 20px;max-width:600px;margin:auto}.links{max-width:600px;margin:40px auto 0;text-align:center}.links a{width:170px;display:inline-block;padding:15px 0;margin:0 15px;border:1px solid #000;color:#000;text-decoration:none;font-family:\"Space Mono\";text-transform:uppercase;font-size:11px;letter-spacing:.1rem;position:relative}.links a:before{content:\"\";height:42px;background:#000;position:absolute;top:0;right:0;width:0;transition:all .3s}.links a:after{transition:all .3s;z-index:999;position:relative;content:\"back to hompage\"}.links a:hover:before{width:170px}.links a:hover:after{color:#fff}.links a:nth-child(2){background:#fff;color:#000}.links a:nth-child(2):before{background:#212121;left:0}.links a:nth-child(2):after{content:\"report error\"}.links a:nth-child(2):hover:after{color:#fff}.social{position:absolute;bottom:15px;left:15px}.social-list{margin:0;padding:0;list-style-type:none}.social-list li{display:inline-block;margin:5px 10px}.social-list li a{color:#000}@media (max-width:480px){.message-heading{font-size:1rem;margin-bottom:30px}.message-description{font-size:.7rem;line-height:2rem}.links a{margin:10px;width:280px}.social{left:50%;margin-left:-55px}}\ndiff --git a/website/css/main.min.css b/website/css/main.min.css\nindex 818002f..e69de29 100644\n--- a/website/css/main.min.css\n+++ b/website/css/main.min.css\n@@ -1 +0,0 @@\n-@font-face{font-family:Polya;src:url(https://raw.githubusercontent.com/matteobruni/tsparticles/gh-pages/fonts/Polya.otf)}html{height:100%;overflow:hidden}body{line-height:1;height:100%;overflow:hidden;background:#000}.github{bottom:10px;right:10px;padding:0 12px 6px;position:fixed;border-radius:10px;background:#fff;border:1px solid #000}.github a{color:#000}.github a:active,.github a:hover,.github a:link,.github a:visited{color:#000;text-decoration:none}.github img{height:30px}.github 
#gh-project{font-size:20px;padding-left:5px;font-weight:700;vertical-align:bottom}.toggle-sidebar{top:50%;left:0;font-size:20px;color:#000;position:absolute;padding:3px;border-top-right-radius:5px;border-bottom-right-radius:5px;background:#e7e7e7;border:1px solid #000;border-left:none}#editor{background:#fff}[hidden]{display:none}#repulse-div{width:200px;height:200px;background-color:rgba(255,255,255,.5);border-radius:100px;position:absolute;top:50%;left:50%;margin-left:-100px;margin-top:-100px;z-index:200}@media (min-width:1600px) and (-webkit-device-pixel-ratio:1){.col-xxl-3{-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}}.btn-react{color:#fff;background-color:#61dafb;border-color:#fff}.btn-react:hover{color:#fff;background-color:#5aa3c4;border-color:#ccc}.btn-react.focus,.btn-react:focus{color:#fff;background-color:#5aa3c4;border-color:#ccc;box-shadow:0 0 0 .2rem rgba(90,163,196,.5)}.btn-react.disabled,.btn-react:disabled{color:#fff;background-color:#61dafb;border-color:#ccc}.btn-react:not(:disabled):not(.disabled).active,.btn-react:not(:disabled):not(.disabled):active,.show>.btn-react.dropdown-toggle{color:#fff;background-color:#5aa3c4;border-color:#ccc}.btn-react:not(:disabled):not(.disabled).active:focus,.btn-react:not(:disabled):not(.disabled):active:focus,.show>.btn-react.dropdown-toggle:focus{box-shadow:0 0 0 .2rem rgba(90,163,196,.5)}#stats,.count-particles{-webkit-user-select:none}#stats{overflow:hidden}#stats-graph canvas{border-radius:3px 3px 0 0}.count-particles{border-radius:0 0 3px 3px}\ndiff --git a/website/css/presets.min.css b/website/css/presets.min.css\nindex 6c2ae2c..e69de29 100644\n--- a/website/css/presets.min.css\n+++ b/website/css/presets.min.css\n@@ -1 +0,0 @@\n-html{height:100%;overflow:hidden}body{line-height:1;height:100%;overflow:hidden}\n", "diff --git a/docs/docs/segment-angular.md b/docs/docs/segment-angular.md\nindex b7ff7d8..c307239 100644\n--- a/docs/docs/segment-angular.md\n+++ b/docs/docs/segment-angular.md\n@@ -29,3 +29,17 @@ Display the currently active Angular CLI version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `angular.json` file is present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-azfunc.md b/docs/docs/segment-azfunc.md\nindex 6b4368a..984c0fb 100644\n--- a/docs/docs/segment-azfunc.md\n+++ b/docs/docs/segment-azfunc.md\n@@ -33,3 +33,17 @@ Display the currently active Azure functions CLI version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when a `host.json` or `local.settings.json` files is present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-crystal.md b/docs/docs/segment-crystal.md\nindex 9cf8ead..8f995bc 100644\n--- a/docs/docs/segment-crystal.md\n+++ b/docs/docs/segment-crystal.md\n@@ -32,3 +32,17 @@ Display the currently active crystal version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.cr` or `shard.yml` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+ properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-dart.md b/docs/docs/segment-dart.md\nindex ddfe247..9eb1d0e 100644\n--- a/docs/docs/segment-dart.md\n+++ b/docs/docs/segment-dart.md\n@@ -33,3 +33,17 @@ Display the currently active dart version.\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.dart`, `pubspec.yaml`, `pubspec.yml`, `pubspec.lock` files or the `.dart_tool`\n folder are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-dotnet.md b/docs/docs/segment-dotnet.md\nindex a8300c1..83bb0c2 100644\n--- a/docs/docs/segment-dotnet.md\n+++ b/docs/docs/segment-dotnet.md\n@@ -37,12 +37,13 @@ Display the currently active .NET SDK version.\n - unsupported_version_icon: `string` - text/icon that is displayed when the active .NET SDK version (e.g., one specified\n by `global.json`) is not installed/supported - defaults to `\\uf071` (X in a rectangle box)\n - template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n-properties below. Defaults does nothing(backward compatibility).\n+properties below. Defaults to `{{ .Full }}`\n - version_url_template: `string` - A go [text/template][go-text-template] template extended\n with [sprig][sprig] utilizing the properties below. 
Defaults does nothing(backward compatibility).\n \n ## Template Properties\n \n+- `.Full`: `string` - the full version\n - `.Major`: `string` - is the major version\n - `.Minor`: `string` - is the minor version\n - `.Patch`: `string` - is the patch version\ndiff --git a/docs/docs/segment-golang.md b/docs/docs/segment-golang.md\nindex 10321d3..7790269 100644\n--- a/docs/docs/segment-golang.md\n+++ b/docs/docs/segment-golang.md\n@@ -32,3 +32,14 @@ Display the currently active golang version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.go` or `go.mod` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\ndiff --git a/docs/docs/segment-java.md b/docs/docs/segment-java.md\nindex f4cc85d..c13c3e0 100644\n--- a/docs/docs/segment-java.md\n+++ b/docs/docs/segment-java.md\n@@ -45,3 +45,14 @@ Display the currently active java version.\n - `*.jar`\n - `*.clj`\n - `*.cljc`\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\ndiff --git a/docs/docs/segment-julia.md b/docs/docs/segment-julia.md\nindex 4b75608..3a4a0ec 100644\n--- a/docs/docs/segment-julia.md\n+++ b/docs/docs/segment-julia.md\n@@ -32,3 +32,17 @@ Display the currently active julia version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.jl` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-node.md b/docs/docs/segment-node.md\nindex 04d5963..ced7d23 100644\n--- a/docs/docs/segment-node.md\n+++ b/docs/docs/segment-node.md\n@@ -40,3 +40,17 @@ segment's background or foreground color\n - display_package_manager: `boolean` - show whether the current project uses Yarn or NPM - defaults to `false`\n - yarn_icon: `string` - the icon/text to display when using Yarn - defaults to ` \\uF61A`\n - npm_icon: `string` - the icon/text to display when using NPM - defaults to ` \\uE71E`\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-php.md b/docs/docs/segment-php.md\nindex a7b05aa..47b8ea4 100644\n--- a/docs/docs/segment-php.md\n+++ b/docs/docs/segment-php.md\n@@ -34,3 +34,17 @@ Display the currently active php version.\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.php, composer.json, composer.lock, .php-version` files are present (default)\n - enable_hyperlink: `bool` - display an hyperlink to the php release notes - defaults to `false`\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-python.md b/docs/docs/segment-python.md\nindex 80fa718..13bd1f8 100644\n--- a/docs/docs/segment-python.md\n+++ b/docs/docs/segment-python.md\n@@ -39,3 +39,17 @@ or not - defaults to `true`\n files are present (default)\n - `environment`: the segment is only displayed when a virtual env is present\n - `context`: the segment is only displayed when either `environment` or `files` is active\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-ruby.md b/docs/docs/segment-ruby.md\nindex e64fcf7..5d812f6 100644\n--- a/docs/docs/segment-ruby.md\n+++ b/docs/docs/segment-ruby.md\n@@ -32,3 +32,17 @@ Display the currently active ruby version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.rb`, `Gemfile` or `Rakefile` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-rust.md b/docs/docs/segment-rust.md\nindex 30c222d..c0f2a43 100644\n--- a/docs/docs/segment-rust.md\n+++ b/docs/docs/segment-rust.md\n@@ -32,3 +32,17 @@ Display the currently active rust version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.rs`, `Cargo.toml` or `Cargo.lock` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/src/segment_language.go b/src/segment_language.go\nindex d9ced7b..2cfffa8 100644\n--- a/src/segment_language.go\n+++ b/src/segment_language.go\n@@ -97,7 +97,7 @@ func (l *language) string() string {\n \t\treturn \"\"\n \t}\n \n-\tsegmentTemplate := l.props.getString(SegmentTemplate, \"{{.Full}}\")\n+\tsegmentTemplate := l.props.getString(SegmentTemplate, \"{{ .Full }}\")\n \ttemplate := &textTemplate{\n \t\tTemplate: segmentTemplate,\n \t\tContext: l.version,\n", "diff --git a/packages/docz-theme-default/src/components/ui/Render.tsx b/packages/docz-theme-default/src/components/ui/Render.tsx\nindex 197359b..943f9ab 100644\n--- a/packages/docz-theme-default/src/components/ui/Render.tsx\n+++ b/packages/docz-theme-default/src/components/ui/Render.tsx\n@@ -24,9 +24,16 @@ const Code = styled('div')`\n }\n `\n \n-export const Render: RenderComponent = ({ component, code }) => (\n+export const Render: RenderComponent = ({\n+ component,\n+ code,\n+ className,\n+ style,\n+}) => (\n <Fragment>\n- <Playground>{component}</Playground>\n+ <Playground className={className} style={style}>\n+ {component}\n+ </Playground>\n <Code>{code}</Code>\n </Fragment>\n )\ndiff --git a/packages/docz/src/components/DocPreview.tsx b/packages/docz/src/components/DocPreview.tsx\nindex ca2d88f..ee8f7c0 100644\n--- a/packages/docz/src/components/DocPreview.tsx\n+++ b/packages/docz/src/components/DocPreview.tsx\n@@ -16,6 +16,8 @@ const DefaultLoading: SFC = () => null\n export type RenderComponent = ComponentType<{\n component: JSX.Element\n code: any\n+ className?: string\n+ style?: any\n }>\n \n export const DefaultRender: RenderComponent = ({ component, code }) => (\ndiff --git a/packages/docz/src/components/Playground.tsx b/packages/docz/src/components/Playground.tsx\nindex d6ff5a3..418c82e 100644\n--- a/packages/docz/src/components/Playground.tsx\n+++ b/packages/docz/src/components/Playground.tsx\n@@ -9,15 +9,21 @@ export interface PlaygroundProps {\n __code: (components: ComponentsMap) => any\n children: any\n components: ComponentsMap\n+ className?: string\n+ style?: any\n }\n \n const BasePlayground: SFC<PlaygroundProps> = ({\n components,\n children,\n __code,\n+ className,\n+ style,\n }) => {\n return components && components.render ? (\n <components.render\n+ className={className}\n+ style={style}\n component={isFn(children) ? 
children() : children}\n code={__code(components)}\n />\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\nindex 0854323..bfc7b7e 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n@@ -47,7 +47,8 @@ final class DiskSpaceRecoveryIT {\n .withZeebeData(volume)\n .withEnv(\"ZEEBE_BROKER_DATA_LOGSEGMENTSIZE\", \"1MB\")\n .withEnv(\"ZEEBE_BROKER_NETWORK_MAXMESSAGESIZE\", \"1MB\")\n- .withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.5\");\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"10MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"1MB\");\n \n private ZeebeClient client;\n \n@@ -127,7 +128,9 @@ final class DiskSpaceRecoveryIT {\n ContainerEngine.builder()\n .withDebugReceiverPort(SocketUtil.getNextAddress().getPort())\n .withContainer(\n- container.withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.0001\"))\n+ container\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"16MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"10MB\"))\n .build();\n \n @BeforeEach\n", "diff --git a/server/src/routes/course/index.ts b/server/src/routes/course/index.ts\nindex 557f5fb..bc0e490 100644\n--- a/server/src/routes/course/index.ts\n+++ b/server/src/routes/course/index.ts\n@@ -209,7 +209,7 @@ function addStudentApi(router: Router, logger: ILogger) {\n router.post('/student/:githubId/status', ...mentorValidators, updateStudentStatus(logger));\n router.post('/student/:githubId/status-self', courseGuard, selfUpdateStudentStatus(logger));\n router.get('/student/:githubId/score', courseGuard, getScoreByStudent(logger));\n- router.post('/student/:githubId/certificate', courseManagerGuard, ...validators, postStudentCertificate(logger));\n+ router.post('/student/:githubId/certificate', courseManagerGuard, validateGithubId, postStudentCertificate(logger));\n \n router.get('/students', courseSupervisorGuard, getStudents(logger));\n router.get('/students/csv', courseSupervisorGuard, getStudentsCsv(logger));\n"]
5
["9acf7a062ee9c0538c2cd4661c1f5da61ab06316", "3a4e21c36d76b4bea8dbb365d3c3bd005a7f3f8f", "1b64ed30a2e3c41abf3976efee4c7463044b2ef1", "672cd2b9775fb6dac2d522cb3f4469db47c0556b", "33c25b2f59c931a7f4af994365522221a7821dca"]
["build", "docs", "feat", "test", "fix"]
fix monorepo.dir prop Signed-off-by: Carlos Alexandro Becker <[email protected]>,reintroduce timeout for assertion The timeout had been removed by a previous commit. Without the timeout the test might be flaky. Also removed obsolete code,add donation section to footer,verify process can start at supported element types Verifies a PI can be started at specific element types. The test will deploy the process, start an instance at the desired start element and verify that it has been activated successfully.,do not run tests and build when no changes reported by lerna
["diff --git a/www/docs/customization/monorepo.md b/www/docs/customization/monorepo.md\nindex 6d0e857..e45490f 100644\n--- a/www/docs/customization/monorepo.md\n+++ b/www/docs/customization/monorepo.md\n@@ -18,7 +18,7 @@ project_name: subproj1\n \n monorepo:\n tag_prefix: subproject1/\n- folder: subproj1\n+ dir: subproj1\n ```\n \n Then, you can release with (from the project's root directory):\n@@ -30,11 +30,11 @@ goreleaser release --rm-dist -f ./subproj1/.goreleaser.yml\n Then, the following is different from a \"regular\" run:\n \n - GoReleaser will then look if current commit has a tag prefixed with `subproject1`, and also the previous tag with the same prefix;\n-- Changelog will include only commits that contain changes to files within the `subproj1` folder;\n+- Changelog will include only commits that contain changes to files within the `subproj1` directory;\n - Release name gets prefixed with `{{ .ProjectName }} ` if empty;\n-- All build's `dir` setting get set to `monorepo.folder` if empty;\n+- All build's `dir` setting get set to `monorepo.dir` if empty;\n - if yours is not, you might want to change that manually;\n-- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.folder`;\n+- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.dir`;\n - On templates, `{{.PrefixedTag}}` will be `monorepo.prefix/tag` (aka the actual tag name), and `{{.Tag}}` has the prefix stripped;\n \n The rest of the release process should work as usual.\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\nindex d0ee4f3..c2ab83c 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\n@@ -13,6 +13,7 @@ import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ACTI\n import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ELEMENT_ACTIVATING;\n import static java.util.function.Predicate.isEqual;\n import static org.assertj.core.api.Assertions.assertThat;\n+import static org.awaitility.Awaitility.await;\n import static org.mockito.ArgumentMatchers.any;\n import static org.mockito.ArgumentMatchers.anyLong;\n import static org.mockito.ArgumentMatchers.eq;\n@@ -30,7 +31,6 @@ import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n import io.camunda.zeebe.streamprocessor.StreamProcessor;\n import io.camunda.zeebe.streamprocessor.StreamProcessor.Phase;\n import io.camunda.zeebe.streamprocessor.StreamProcessorMode;\n-import org.awaitility.Awaitility;\n import org.junit.Rule;\n import org.junit.Test;\n import org.mockito.InOrder;\n@@ -71,7 +71,7 @@ public final class StreamProcessorReplayModeTest {\n // when\n startStreamProcessor(replayUntilEnd);\n \n- Awaitility.await()\n+ await()\n .untilAsserted(\n () -> assertThat(getCurrentPhase(replayUntilEnd)).isEqualTo(Phase.PROCESSING));\n \n@@ -163,7 +163,7 @@ public final class StreamProcessorReplayModeTest {\n command().processInstance(ACTIVATE_ELEMENT, RECORD),\n event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n \n- Awaitility.await(\"should have replayed first events\")\n+ await(\"should have replayed first events\")\n 
.until(replayContinuously::getLastSuccessfulProcessedRecordPosition, (pos) -> pos > 0);\n \n // when\n@@ -210,7 +210,7 @@ public final class StreamProcessorReplayModeTest {\n command().processInstance(ACTIVATE_ELEMENT, RECORD),\n event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n \n- Awaitility.await(\"should have replayed first events\")\n+ await(\"should have replayed first events\")\n .until(replayContinuously::getLastSuccessfulProcessedRecordPosition, (pos) -> pos > 0);\n streamProcessor.pauseProcessing().join();\n replayContinuously.writeBatch(\n@@ -244,7 +244,7 @@ public final class StreamProcessorReplayModeTest {\n // then\n verify(eventApplier, TIMEOUT).applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n \n- Awaitility.await()\n+ await()\n .untilAsserted(\n () -> {\n final var lastProcessedPosition = getLastProcessedPosition(replayContinuously);\n@@ -273,8 +273,7 @@ public final class StreamProcessorReplayModeTest {\n \n verify(eventApplier, TIMEOUT).applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n \n- Awaitility.await()\n- .until(() -> getLastProcessedPosition(replayContinuously), isEqual(commandPosition));\n+ await().until(() -> getLastProcessedPosition(replayContinuously), isEqual(commandPosition));\n \n // then\n assertThat(replayContinuously.getLastSuccessfulProcessedRecordPosition())\n@@ -285,7 +284,6 @@ public final class StreamProcessorReplayModeTest {\n @Test\n public void shouldNotSetLastProcessedPositionIfLessThanSnapshotPosition() {\n // given\n- final var commandPositionBeforeSnapshot = 1L;\n final var snapshotPosition = 2L;\n \n startStreamProcessor(replayContinuously);\n@@ -298,23 +296,20 @@ public final class StreamProcessorReplayModeTest {\n // when\n startStreamProcessor(replayContinuously);\n \n- Awaitility.await()\n+ await()\n .untilAsserted(\n () -> assertThat(getCurrentPhase(replayContinuously)).isEqualTo(Phase.REPLAY));\n \n- final var eventPosition =\n- replayContinuously.writeEvent(\n- ELEMENT_ACTIVATING,\n- RECORD,\n- writer -> writer.sourceRecordPosition(commandPositionBeforeSnapshot));\n-\n // then\n final var lastProcessedPositionState = replayContinuously.getLastProcessedPositionState();\n \n- assertThat(lastProcessedPositionState.getLastSuccessfulProcessedRecordPosition())\n- .describedAs(\n- \"Expected that the last processed position is not less than the snapshot position\")\n- .isEqualTo(snapshotPosition);\n+ await()\n+ .untilAsserted(\n+ () ->\n+ assertThat(lastProcessedPositionState.getLastSuccessfulProcessedRecordPosition())\n+ .describedAs(\n+ \"Expected that the last processed position is not less than the snapshot position\")\n+ .isEqualTo(snapshotPosition));\n }\n \n private StreamProcessor startStreamProcessor(final StreamProcessorRule streamProcessorRule) {\n", "diff --git a/client/src/components/Feedback.tsx b/client/src/components/Feedback.tsx\nindex 117b21d..0d7e7a9 100644\n--- a/client/src/components/Feedback.tsx\n+++ b/client/src/components/Feedback.tsx\n@@ -16,12 +16,7 @@ const publicRoutes = [\n name: `\ud83d\udcdd Feedback on RS School`,\n link: `https://docs.google.com/forms/d/1F4NeS0oBq-CY805aqiPVp6CIrl4_nIYJ7Z_vUcMOFrQ/viewform`,\n newTab: true,\n- },\n- {\n- name: `\ud83d\udcb0 Make a donation`,\n- link: `https://www.patreon.com/therollingscopes`,\n- newTab: true,\n- },\n+ }\n ];\n \n type LinkInfo = { name: string; link: string; newTab: boolean };\ndiff --git a/client/src/components/FooterLayout.tsx b/client/src/components/FooterLayout.tsx\nindex 79c0f39..56661b4 100644\n--- 
a/client/src/components/FooterLayout.tsx\n+++ b/client/src/components/FooterLayout.tsx\n@@ -1,5 +1,5 @@\n import * as React from 'react';\n-import { Col, Layout, Row, Divider } from 'antd';\n+import { Col, Layout, Row, Divider, Button } from 'antd';\n import { Feedback } from './Feedback';\n import { Help } from './Help';\n import { SocialNetworks } from './SocialNetworks';\n@@ -23,9 +23,17 @@ class FooterLayout extends React.Component<any, any> {\n </Col>\n </Row>\n <Divider />\n- <div className=\"text-center\">\n- <small>&copy; The Rolling Scopes 2019</small>\n- </div>\n+ <h5> Thank you for your support! \ud83c\udf89</h5>\n+ <p>\n+ <object type=\"image/svg+xml\"\n+ data=\"https://opencollective.com/rsschool/backers.svg?avatarHeight=28&button=false\"></object>\n+ </p>\n+ <p>\n+ <Button size=\"small\" href=\"https://opencollective.com/rsschool#section-contribute\" target=\"_blank\" ghost>\n+ \u2764\ufe0f Make a donation\n+ </Button>\n+ </p>\n+ <p className=\"text-center\"><small>&copy; The Rolling Scopes 2019</small></p>\n </Footer>\n </div>\n );\ndiff --git a/client/src/styles/main.scss b/client/src/styles/main.scss\nindex cd61fcd..6e37ea6 100644\n--- a/client/src/styles/main.scss\n+++ b/client/src/styles/main.scss\n@@ -46,4 +46,7 @@ body,\n padding-right: 0;\n font-size: .7rem;\n }\n+ .ant-btn {\n+ font-size: .7rem;\n+ }\n }\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\nnew file mode 100644\nindex 0000000..a505307\n--- /dev/null\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\n@@ -0,0 +1,233 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. 
You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.engine.processing.processinstance;\n+\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.groups.Tuple.tuple;\n+\n+import io.camunda.zeebe.engine.util.EngineRule;\n+import io.camunda.zeebe.model.bpmn.Bpmn;\n+import io.camunda.zeebe.model.bpmn.BpmnModelInstance;\n+import io.camunda.zeebe.protocol.record.Record;\n+import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n+import io.camunda.zeebe.protocol.record.value.BpmnElementType;\n+import io.camunda.zeebe.test.util.record.RecordingExporter;\n+import io.camunda.zeebe.test.util.record.RecordingExporterTestWatcher;\n+import java.util.Collection;\n+import java.util.Collections;\n+import java.util.List;\n+import java.util.Map;\n+import org.junit.ClassRule;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.runner.RunWith;\n+import org.junit.runners.Parameterized;\n+import org.junit.runners.Parameterized.Parameters;\n+\n+@RunWith(Parameterized.class)\n+public class CreateProcessInstanceSupportedElementTest {\n+\n+ @ClassRule public static final EngineRule ENGINE = EngineRule.singlePartition();\n+ private static final String PROCESS_ID = \"processId\";\n+ private static final String CHILD_PROCESS_ID = \"childProcessId\";\n+ private static final String START_ELEMENT_ID = \"startElement\";\n+ private static final String MESSAGE = \"message\";\n+ private static final String JOBTYPE = \"jobtype\";\n+\n+ @Rule\n+ public final RecordingExporterTestWatcher recordingExporterTestWatcher =\n+ new RecordingExporterTestWatcher();\n+\n+ private final Scenario scenario;\n+\n+ public CreateProcessInstanceSupportedElementTest(final Scenario scenario) {\n+ this.scenario = scenario;\n+ }\n+\n+ @Parameters(name = \"{0}\")\n+ public static Collection<Object> scenarios() {\n+ return List.of(\n+ new Scenario(\n+ BpmnElementType.SUB_PROCESS,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .subProcess(START_ELEMENT_ID)\n+ .embeddedSubProcess()\n+ .startEvent()\n+ .subProcessDone()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_SUB_PROCESS,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .eventSubProcess(\n+ START_ELEMENT_ID, e -> e.startEvent().timerWithDuration(\"PT1H\").endEvent())\n+ .startEvent()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_CATCH_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(START_ELEMENT_ID)\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_THROW_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateThrowEvent(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.END_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().endEvent(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SERVICE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.RECEIVE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .receiveTask(START_ELEMENT_ID)\n+ .message(b -> 
b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.USER_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().userTask(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.MANUAL_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .manualTask(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EXCLUSIVE_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .exclusiveGateway(START_ELEMENT_ID)\n+ .defaultFlow()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.PARALLEL_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .parallelGateway(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_BASED_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .eventBasedGateway(START_ELEMENT_ID)\n+ .intermediateCatchEvent()\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .moveToLastGateway()\n+ .intermediateCatchEvent()\n+ .timerWithDuration(\"PT1H\")\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.MULTI_INSTANCE_BODY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(\n+ START_ELEMENT_ID,\n+ t ->\n+ t.zeebeJobType(JOBTYPE)\n+ .multiInstance(m -> m.parallel().zeebeInputCollectionExpression(\"[1]\")))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.CALL_ACTIVITY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .callActivity(START_ELEMENT_ID, c -> c.zeebeProcessId(CHILD_PROCESS_ID))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.BUSINESS_RULE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .businessRuleTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SCRIPT_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .scriptTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SEND_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .sendTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()));\n+ }\n+\n+ @Test\n+ public void testProcessInstanceCanStartAtElementType() {\n+ // given\n+ ENGINE.deployment().withXmlResource(scenario.modelInstance).deploy();\n+ if (scenario.type == BpmnElementType.CALL_ACTIVITY) {\n+ ENGINE.deployment().withXmlResource(getChildProcess()).deploy();\n+ }\n+\n+ // when\n+ final long instanceKey =\n+ ENGINE\n+ .processInstance()\n+ .ofBpmnProcessId(PROCESS_ID)\n+ .withStartInstruction(START_ELEMENT_ID)\n+ .withVariables(scenario.variables)\n+ .create();\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.processInstanceRecords()\n+ .withProcessInstanceKey(instanceKey)\n+ .onlyEvents()\n+ .limit(\n+ r ->\n+ r.getValue().getBpmnElementType() == scenario.type\n+ && r.getIntent() == ProcessInstanceIntent.ELEMENT_ACTIVATED))\n+ .extracting(record -> record.getValue().getBpmnElementType(), Record::getIntent)\n+ .containsSequence(\n+ tuple(BpmnElementType.PROCESS, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(BpmnElementType.PROCESS, 
ProcessInstanceIntent.ELEMENT_ACTIVATED),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATED));\n+ }\n+\n+ private BpmnModelInstance getChildProcess() {\n+ return Bpmn.createExecutableProcess(CHILD_PROCESS_ID).startEvent().endEvent().done();\n+ }\n+\n+ record Scenario(\n+ BpmnElementType type, BpmnModelInstance modelInstance, Map<String, Object> variables) {}\n+}\n", "diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml\nindex ca46ca4..d69e581 100644\n--- a/.github/workflows/tests.yml\n+++ b/.github/workflows/tests.yml\n@@ -42,23 +42,25 @@ jobs:\n - name: Set CC Required env vars\n run: export GIT_BRANCH=$GITHUB_HEAD_REF && export GIT_COMMIT_SHA=$(git rev-parse origin/$GITHUB_HEAD_REF)\n \n- - name: Build\n- run: yarn build\n-\n - name: Lint\n run: yarn lint\n \n+ - name: Check for changes\n+ id: changed_packages\n+ run: |\n+ echo \"::set-output name=changed_packages::$(node ./node_modules/.bin/lerna changed -p | wc -l)\"\n+\n+ - name: Build\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n+ run: yarn build\n+\n - name: Test\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n run: |\n yarn run-rs-in-background\n yarn coverage > COVERAGE_RESULT\n echo \"$(cat COVERAGE_RESULT)\"\n \n- - name: Check for changes\n- id: changed_packages\n- run: |\n- echo \"::set-output name=changed_packages::$(node ./node_modules/.bin/lerna changed -p | wc -l)\"\n-\n - name: Release dev version for testing\n if: github.ref == 'refs/heads/master' && matrix.node-version == '15.x' && steps.changed_packages.outputs.changed_packages != '0'\n run: |\n@@ -70,11 +72,13 @@ jobs:\n NPM_TOKEN: ${{ secrets.NPM_TOKEN }}\n \n - name: Coveralls\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n uses: coverallsapp/github-action@master\n with:\n github-token: ${{ secrets.GITHUB_TOKEN }}\n \n - name: Codeclimate\n+ if: steps.changed_packages.outputs.changed_packages != '0'\n uses: paambaati/[email protected]\n env:\n CC_TEST_REPORTER_ID: e2a39c5dc1a13674e97e94a467bacfaec953814982c7de89e9f0b55031e43bd8\n"]
5
["9ed3c0c4a72af977fc9150512fb6538f20a94b22", "0d23f1b3ed22e615b9611bb4eae01d2241e64dff", "7704121d0c0bfce49f01c2b41cbc64a642cbb399", "a5ecfdf49b0d4c43fbbbf7947be7c0327ccb3415", "155611c99fe8692f1afc092599f5a7c727893315"]
["docs", "refactor", "feat", "test", "build"]
alerts do not trigger modal lifecycle events fixes #8616,remove unnecessary `parse_json` call in `ops.StructField` impl,avoid cancelling jobs,parallelize pybind11 build,add title to badge icon
["diff --git a/src/components/app/app-root.ts b/src/components/app/app-root.ts\nindex ec7daee..29dc797 100644\n--- a/src/components/app/app-root.ts\n+++ b/src/components/app/app-root.ts\n@@ -15,6 +15,7 @@ export const AppRootToken = new OpaqueToken('USERROOT');\n selector: 'ion-app',\n template:\n '<div #viewport app-viewport></div>' +\n+ '<div #modalPortal overlay-portal></div>' +\n '<div #overlayPortal overlay-portal></div>' +\n '<div #loadingPortal class=\"loading-portal\" overlay-portal></div>' +\n '<div #toastPortal class=\"toast-portal\" overlay-portal></div>' +\n@@ -24,6 +25,8 @@ export class IonicApp extends Ion implements OnInit {\n \n @ViewChild('viewport', {read: ViewContainerRef}) _viewport: ViewContainerRef;\n \n+ @ViewChild('modalPortal', { read: OverlayPortal }) _modalPortal: OverlayPortal;\n+\n @ViewChild('overlayPortal', { read: OverlayPortal }) _overlayPortal: OverlayPortal;\n \n @ViewChild('loadingPortal', { read: OverlayPortal }) _loadingPortal: OverlayPortal;\n@@ -96,6 +99,9 @@ export class IonicApp extends Ion implements OnInit {\n if (portal === AppPortal.TOAST) {\n return this._toastPortal;\n }\n+ if (portal === AppPortal.MODAL) {\n+ return this._modalPortal;\n+ }\n return this._overlayPortal;\n }\n \n@@ -110,6 +116,7 @@ export class IonicApp extends Ion implements OnInit {\n \n export enum AppPortal {\n DEFAULT,\n+ MODAL,\n LOADING,\n TOAST\n };\ndiff --git a/src/components/modal/modal.ts b/src/components/modal/modal.ts\nindex bd4d406..c3e7a62 100644\n--- a/src/components/modal/modal.ts\n+++ b/src/components/modal/modal.ts\n@@ -1,6 +1,7 @@\n import { Injectable } from '@angular/core';\n \n import { App } from '../app/app';\n+import { AppPortal } from '../app/app-root';\n import { isPresent } from '../../util/util';\n import { ModalCmp } from './modal-component';\n import { ModalOptions } from './modal-options';\n@@ -40,7 +41,7 @@ export class Modal extends ViewController {\n * @returns {Promise} Returns a promise which is resolved when the transition has completed.\n */\n present(navOptions: NavOptions = {}) {\n- return this._app.present(this, navOptions);\n+ return this._app.present(this, navOptions, AppPortal.MODAL);\n }\n \n /**\n", "diff --git a/ibis/backends/snowflake/registry.py b/ibis/backends/snowflake/registry.py\nindex cbddf8d..d5a0859 100644\n--- a/ibis/backends/snowflake/registry.py\n+++ b/ibis/backends/snowflake/registry.py\n@@ -231,7 +231,7 @@ operation_registry.update(\n ops.DateFromYMD: fixed_arity(sa.func.date_from_parts, 3),\n ops.StringToTimestamp: fixed_arity(sa.func.to_timestamp_tz, 2),\n ops.RegexExtract: fixed_arity(sa.func.regexp_substr, 3),\n- ops.RegexSearch: fixed_arity(lambda left, right: left.op('REGEXP')(right), 2),\n+ ops.RegexSearch: fixed_arity(sa.sql.operators.custom_op(\"REGEXP\"), 2),\n ops.RegexReplace: fixed_arity(sa.func.regexp_replace, 3),\n ops.ExtractMillisecond: fixed_arity(\n lambda arg: sa.cast(\n@@ -244,8 +244,7 @@ operation_registry.update(\n t.translate(op.arg), _TIMESTAMP_UNITS_TO_SCALE[op.unit]\n ),\n ops.StructField: lambda t, op: sa.cast(\n- sa.func.parse_json(sa.func.get(t.translate(op.arg), op.field)),\n- t.get_sqla_type(op.output_dtype),\n+ sa.func.get(t.translate(op.arg), op.field), t.get_sqla_type(op.output_dtype)\n ),\n ops.NthValue: _nth_value,\n }\n", "diff --git a/.github/workflows/ibis-backends-cloud.yml b/.github/workflows/ibis-backends-cloud.yml\nindex 321708e..b990984 100644\n--- a/.github/workflows/ibis-backends-cloud.yml\n+++ b/.github/workflows/ibis-backends-cloud.yml\n@@ -29,7 +29,9 @@ jobs:\n 
name: ${{ matrix.backend.title }} python-${{ matrix.python-version }}\n # only a single bigquery or snowflake run at a time, otherwise test data is\n # clobbered by concurrent runs\n- concurrency: ${{ matrix.backend.name }}\n+ concurrency:\n+ group: ${{ matrix.backend.name }}\n+ cancel-in-progress: false\n runs-on: ubuntu-latest\n strategy:\n fail-fast: false\n", "diff --git a/poetry-overrides.nix b/poetry-overrides.nix\nindex d37c5ed..aaaaf02 100644\n--- a/poetry-overrides.nix\n+++ b/poetry-overrides.nix\n@@ -82,4 +82,11 @@ self: super:\n {\n patches = (attrs.patches or [ ]) ++ [ ./patches/watchdog-force-kqueue.patch ];\n });\n+\n+ pybind11 = super.pybind11.overridePythonAttrs (_: {\n+ postBuild = ''\n+ # build tests\n+ make -j $NIX_BUILD_CORES -l $NIX_BUILD_CORES\n+ '';\n+ });\n }\n", "diff --git a/kibbeh/src/modules/room/chat/RoomChatList.tsx b/kibbeh/src/modules/room/chat/RoomChatList.tsx\nindex a7418e6..805a9a4 100644\n--- a/kibbeh/src/modules/room/chat/RoomChatList.tsx\n+++ b/kibbeh/src/modules/room/chat/RoomChatList.tsx\n@@ -16,6 +16,11 @@ interface ChatListProps {\n users: RoomUser[];\n }\n \n+interface BadgeIconData {\n+ emoji: string,\n+ title: string\n+}\n+\n export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const { setData } = useContext(UserPreviewModalContext);\n const { messages, toggleFrozen } = useRoomChatStore();\n@@ -48,11 +53,14 @@ export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const getBadgeIcon = (m: Message) => {\n const user = users.find((u) => u.id === m.userId);\n const isSpeaker = room.creatorId === user?.id || user?.roomPermissions?.isSpeaker;\n- let emoji = null;\n+ let badgeIconData: BadgeIconData | null = null;\n if (isSpeaker) {\n- emoji = \"\ud83d\udce3\";\n+ badgeIconData = {\n+ emoji: \"\ud83d\udce3\",\n+ title: \"Speaker\"\n+ };\n }\n- return emoji && <Twemoji text={emoji} style={{ marginRight: \"1ch\" }}/>;\n+ return badgeIconData && <Twemoji text={badgeIconData.emoji} title={badgeIconData.title} style={{ marginRight: \"1ch\" }}/>;\n };\n \n return (\n"]
5
["e2704a4a25b9e348764e1cc922ca7d6a927550eb", "9e80231539aa307e607e2b82b35df9e09ede8385", "19514bc68624a964c63fc217f163f7b11f3dfe82", "9ab4c61975e073e214646443d088339cfdbaa88d", "6e5098655e6d9bb13f6423abe780cdf6b50ff13a"]
["fix", "refactor", "ci", "build", "feat"]
Improved Config Loading #423,bundle and tree shake assets with webpack,Added tooltip for Data sources table buttons only on small screen,getBorderSize() missing "width" The correct property name to use is "borderWidth", not just "border". "border" works in Chrome but was breaking in Firefox. Also had to change .ui-grid-header's box-sizing to content-box so IE11 would include the border in height calcs. AND finally IE11 was returning fractional heights so Grid parseInt()s the returned values.,uses macros to implement Settings enums
["diff --git a/CHANGELOG.md b/CHANGELOG.md\nindex 76dd749..2087803 100644\n--- a/CHANGELOG.md\n+++ b/CHANGELOG.md\n@@ -7,6 +7,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)\n ## [Unreleased]\n * Fix the tab '(Sync)' suffix in named tabs (https://github.com/zellij-org/zellij/pull/410)\n * Improve performance when multiple panes are open (https://github.com/zellij-org/zellij/pull/318)\n+* Improve error reporting and tests of configuration (https://github.com/zellij-org/zellij/pull/423)\n \n ## [0.6.0] - 2021-04-29\n * Doesn't quit anymore on single `q` press while in tab mode (https://github.com/zellij-org/zellij/pull/342)\n", "diff --git a/package.json b/package.json\nindex c8051d2..b0a97fb 100644\n--- a/package.json\n+++ b/package.json\n@@ -60,6 +60,7 @@\n \"babel-cli\": \"^6.16.0\",\n \"babel-core\": \"^6.16.0\",\n \"babel-eslint\": \"^7.0.0\",\n+ \"babel-loader\": \"^6.2.5\",\n \"babel-plugin-transform-class-properties\": \"^6.10.2\",\n \"babel-plugin-transform-flow-strip-types\": \"^6.14.0\",\n \"babel-preset-es2015-node6\": \"^0.3.0\",\n@@ -82,6 +83,7 @@\n \"eslint-plugin-react\": \"^6.3.0\",\n \"flow-bin\": \"^0.33.0\",\n \"jsdom\": \"^9.4.2\",\n+ \"json-loader\": \"^0.5.4\",\n \"jsx-chai\": \"^4.0.0\",\n \"mocha\": \"^3.0.2\",\n \"mock-require\": \"^1.3.0\",\n@@ -91,6 +93,8 @@\n \"rimraf\": \"^2.5.2\",\n \"sinon\": \"^1.17.6\",\n \"sinon-chai\": \"^2.8.0\",\n- \"watch\": \"^1.0.0\"\n+ \"source-map-support\": \"^0.4.3\",\n+ \"watch\": \"^1.0.0\",\n+ \"webpack\": \"^1.13.2\"\n }\n }\ndiff --git a/webpack.config.js b/webpack.config.js\nnew file mode 100644\nindex 0000000..0ca6da1\n--- /dev/null\n+++ b/webpack.config.js\n@@ -0,0 +1,44 @@\n+const webpack = require('webpack');\n+const path = require('path');\n+const fs = require('fs');\n+\n+const nodeModules = {\n+ zmq: 'commonjs zmq',\n+ jmp: 'commonjs jmp',\n+ github: 'commonjs github',\n+};\n+\n+module.exports = {\n+ entry: './src/notebook/index.js',\n+ target: 'electron-renderer',\n+ output: {\n+ path: path.join(__dirname, 'app', 'build'),\n+ filename: 'webpacked-notebook.js'\n+ },\n+ module: {\n+ loaders: [\n+ { test: /\\.js$/, exclude: /node_modules/, loaders: ['babel'] },\n+ { test: /\\.json$/, loader: 'json-loader' },\n+ ]\n+ },\n+ resolve: {\n+ extensions: ['', '.js', '.jsx'],\n+ root: path.join(__dirname, 'app'),\n+ // Webpack 1\n+ modulesDirectories: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ path.resolve(__dirname, 'node_modules'),\n+ ],\n+ // Webpack 2\n+ modules: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ ],\n+ },\n+ externals: nodeModules,\n+ plugins: [\n+ new webpack.IgnorePlugin(/\\.(css|less)$/),\n+ new webpack.BannerPlugin('require(\"source-map-support\").install();',\n+ { raw: true, entryOnly: false })\n+ ],\n+ devtool: 'sourcemap'\n+};\n", "diff --git a/packages/nc-gui/components/dashboard/settings/DataSources.vue b/packages/nc-gui/components/dashboard/settings/DataSources.vue\nindex 78caa98..0ed5df9 100644\n--- a/packages/nc-gui/components/dashboard/settings/DataSources.vue\n+++ b/packages/nc-gui/components/dashboard/settings/DataSources.vue\n@@ -351,59 +351,78 @@ const isEditBaseModalOpen = computed({\n \n <div class=\"ds-table-col ds-table-actions\">\n <div class=\"flex items-center gap-2\">\n- <NcButton\n- v-if=\"!sources[0].is_meta && !sources[0].is_local\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- size=\"small\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.Metadata)\"\n- >\n- <div class=\"flex items-center 
gap-2 text-gray-600\">\n- <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('tooltip.metaSync') }}\n+ <NcTooltip v-if=\"!sources[0].is_meta && !sources[0].is_local\" overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('tooltip.metaSync') }}\n+ </template>\n+ <NcButton\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ size=\"small\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.Metadata)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('tooltip.metaSync') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.ERD)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.relations') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.relations') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.ERD)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.relations') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.UIAcl)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('labels.uiAcl') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('labels.uiAcl') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.UIAcl)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('labels.uiAcl') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.Audit)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"book\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.audit') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.audit') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.Audit)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"book\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.audit') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n+ </NcButton>\n+ </NcTooltip>\n </div>\n </div>\n <div class=\"ds-table-col 
ds-table-crud\">\n@@ -450,67 +469,92 @@ const isEditBaseModalOpen = computed({\n \n <div class=\"ds-table-col ds-table-actions\">\n <div class=\"flex items-center gap-2\">\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.ERD)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.relations') }}\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.relations') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.ERD)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.relations') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('labels.uiAcl') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ type=\"text\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.UIAcl)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('labels.uiAcl') }}\n+ </div>\n+ </div>\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('tooltip.metaSync') }}\n+ </template>\n+ <NcButton\n+ v-if=\"!source.is_meta && !source.is_local\"\n+ size=\"small\"\n+ type=\"text\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.Metadata)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('tooltip.metaSync') }}\n+ </div>\n+ </div>\n+ </NcButton>\n+ </NcTooltip>\n+ </div>\n+ </div>\n+ <div class=\"ds-table-col ds-table-crud justify-end gap-x-1\">\n+ <NcTooltip>\n+ <template #title>\n+ {{ $t('general.edit') }}\n+ </template>\n <NcButton\n+ v-if=\"!source.is_meta && !source.is_local\"\n size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n type=\"text\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.UIAcl)\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.Edit)\"\n >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('labels.uiAcl') }}\n- </div>\n- </div>\n+ <GeneralIcon icon=\"edit\" class=\"text-gray-600 -mt-0.5\" />\n </NcButton>\n+ </NcTooltip>\n+ <NcTooltip>\n+ <template #title>\n+ {{ $t('general.delete') }}\n+ </template>\n <NcButton\n v-if=\"!source.is_meta && !source.is_local\"\n size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n type=\"text\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.Metadata)\"\n+ @click=\"openDeleteBase(source)\"\n >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n- <div 
class=\"nc-action-btn-label\">\n- {{ $t('tooltip.metaSync') }}\n- </div>\n- </div>\n+ <GeneralIcon icon=\"delete\" class=\"text-red-500 -mt-0.5\" />\n </NcButton>\n- </div>\n- </div>\n- <div class=\"ds-table-col ds-table-crud justify-end gap-x-1\">\n- <NcButton\n- v-if=\"!source.is_meta && !source.is_local\"\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n- type=\"text\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.Edit)\"\n- >\n- <GeneralIcon icon=\"edit\" class=\"text-gray-600 -mt-0.5\" />\n- </NcButton>\n- <NcButton\n- v-if=\"!source.is_meta && !source.is_local\"\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n- type=\"text\"\n- @click=\"openDeleteBase(source)\"\n- >\n- <GeneralIcon icon=\"delete\" class=\"text-red-500 -mt-0.5\" />\n- </NcButton>\n+ </NcTooltip>\n </div>\n </div>\n </template>\ndiff --git a/packages/nc-gui/components/nc/Tooltip.vue b/packages/nc-gui/components/nc/Tooltip.vue\nindex 0810b8b..97b159e 100644\n--- a/packages/nc-gui/components/nc/Tooltip.vue\n+++ b/packages/nc-gui/components/nc/Tooltip.vue\n@@ -12,6 +12,7 @@ interface Props {\n disabled?: boolean\n placement?: TooltipPlacement | undefined\n hideOnClick?: boolean\n+ overlayClassName?: string\n }\n \n const props = defineProps<Props>()\n@@ -36,6 +37,8 @@ const attrs = useAttrs()\n \n const isKeyPressed = ref(false)\n \n+const overlayClassName = computed(() => props.overlayClassName)\n+\n onKeyStroke(\n (e) => e.key === modifierKey.value,\n (e) => {\n@@ -100,7 +103,7 @@ const onClick = () => {\n <template>\n <a-tooltip\n v-model:visible=\"showTooltip\"\n- :overlay-class-name=\"`nc-tooltip ${showTooltip ? 'visible' : 'hidden'}`\"\n+ :overlay-class-name=\"`nc-tooltip ${showTooltip ? 'visible' : 'hidden'} ${overlayClassName}`\"\n :overlay-style=\"tooltipStyle\"\n arrow-point-at-center\n :trigger=\"[]\"\n", "diff --git a/src/js/core/factories/Grid.js b/src/js/core/factories/Grid.js\nindex dcf10af..2be7842 100644\n--- a/src/js/core/factories/Grid.js\n+++ b/src/js/core/factories/Grid.js\n@@ -1525,7 +1525,7 @@ angular.module('ui.grid')\n var oldHeaderHeight = container.headerHeight;\n var headerHeight = gridUtil.outerElementHeight(container.header);\n \n- container.headerHeight = headerHeight;\n+ container.headerHeight = parseInt(headerHeight, 10);\n \n if (oldHeaderHeight !== headerHeight) {\n rebuildStyles = true;\n@@ -1534,7 +1534,9 @@ angular.module('ui.grid')\n // Get the \"inner\" header height, that is the height minus the top and bottom borders, if present. We'll use it to make sure all the headers have a consistent height\n var topBorder = gridUtil.getBorderSize(container.header, 'top');\n var bottomBorder = gridUtil.getBorderSize(container.header, 'bottom');\n- var innerHeaderHeight = headerHeight - topBorder - bottomBorder;\n+ var innerHeaderHeight = parseInt(headerHeight - topBorder - bottomBorder, 10);\n+\n+ innerHeaderHeight = innerHeaderHeight < 0 ? 
0 : innerHeaderHeight;\n \n container.innerHeaderHeight = innerHeaderHeight;\n \ndiff --git a/src/js/core/services/ui-grid-util.js b/src/js/core/services/ui-grid-util.js\nindex 2c32cbe..cc7c36c 100644\n--- a/src/js/core/services/ui-grid-util.js\n+++ b/src/js/core/services/ui-grid-util.js\n@@ -757,6 +757,8 @@ module.service('gridUtil', ['$log', '$window', '$document', '$http', '$templateC\n borderType = 'border';\n }\n \n+ borderType += 'Width';\n+\n var val = parseInt(styles[borderType], 10);\n \n if (isNaN(val)) {\ndiff --git a/src/less/header.less b/src/less/header.less\nindex 5468a43..de8ff0b 100644\n--- a/src/less/header.less\n+++ b/src/less/header.less\n@@ -7,6 +7,7 @@\n \n .ui-grid-header {\n border-bottom: 1px solid @borderColor;\n+ box-sizing: content-box;;\n }\n \n .ui-grid-top-panel {\n", "diff --git a/src/app/settings.rs b/src/app/settings.rs\nindex e0e5ed1..60584f4 100644\n--- a/src/app/settings.rs\n+++ b/src/app/settings.rs\n@@ -33,76 +33,26 @@ impl AppFlags {\n AppFlags(NEEDS_LONG_VERSION | NEEDS_LONG_HELP | NEEDS_SC_HELP | UTF8_NONE)\n }\n \n- pub fn set(&mut self, s: AppSettings) {\n- match s {\n- AppSettings::SubcommandsNegateReqs => self.0.insert(SC_NEGATE_REQS),\n- AppSettings::VersionlessSubcommands => self.0.insert(VERSIONLESS_SC),\n- AppSettings::SubcommandRequired => self.0.insert(SC_REQUIRED),\n- AppSettings::ArgRequiredElseHelp => self.0.insert(A_REQUIRED_ELSE_HELP),\n- AppSettings::GlobalVersion => self.0.insert(GLOBAL_VERSION),\n- AppSettings::UnifiedHelpMessage => self.0.insert(UNIFIED_HELP),\n- AppSettings::WaitOnError => self.0.insert(WAIT_ON_ERROR),\n- AppSettings::SubcommandRequiredElseHelp => self.0.insert(SC_REQUIRED_ELSE_HELP),\n- AppSettings::NeedsLongHelp => self.0.insert(NEEDS_LONG_HELP),\n- AppSettings::NeedsLongVersion => self.0.insert(NEEDS_LONG_VERSION),\n- AppSettings::NeedsSubcommandHelp => self.0.insert(NEEDS_SC_HELP),\n- AppSettings::DisableVersion => self.0.insert(DISABLE_VERSION),\n- AppSettings::Hidden => self.0.insert(HIDDEN),\n- AppSettings::TrailingVarArg => self.0.insert(TRAILING_VARARG),\n- AppSettings::NoBinaryName => self.0.insert(NO_BIN_NAME),\n- AppSettings::AllowExternalSubcommands => self.0.insert(ALLOW_UNK_SC),\n- AppSettings::StrictUtf8 => self.0.insert(UTF8_STRICT),\n- AppSettings::AllowInvalidUtf8 => self.0.insert(UTF8_NONE),\n- AppSettings::AllowLeadingHyphen => self.0.insert(LEADING_HYPHEN),\n- }\n- }\n-\n- pub fn unset(&mut self, s: AppSettings) {\n- match s {\n- AppSettings::SubcommandsNegateReqs => self.0.remove(SC_NEGATE_REQS),\n- AppSettings::VersionlessSubcommands => self.0.remove(VERSIONLESS_SC),\n- AppSettings::SubcommandRequired => self.0.remove(SC_REQUIRED),\n- AppSettings::ArgRequiredElseHelp => self.0.remove(A_REQUIRED_ELSE_HELP),\n- AppSettings::GlobalVersion => self.0.remove(GLOBAL_VERSION),\n- AppSettings::UnifiedHelpMessage => self.0.remove(UNIFIED_HELP),\n- AppSettings::WaitOnError => self.0.remove(WAIT_ON_ERROR),\n- AppSettings::SubcommandRequiredElseHelp => self.0.remove(SC_REQUIRED_ELSE_HELP),\n- AppSettings::NeedsLongHelp => self.0.remove(NEEDS_LONG_HELP),\n- AppSettings::NeedsLongVersion => self.0.remove(NEEDS_LONG_VERSION),\n- AppSettings::NeedsSubcommandHelp => self.0.remove(NEEDS_SC_HELP),\n- AppSettings::DisableVersion => self.0.remove(DISABLE_VERSION),\n- AppSettings::Hidden => self.0.remove(HIDDEN),\n- AppSettings::TrailingVarArg => self.0.remove(TRAILING_VARARG),\n- AppSettings::NoBinaryName => self.0.remove(NO_BIN_NAME),\n- AppSettings::AllowExternalSubcommands => self.0.remove(ALLOW_UNK_SC),\n- 
AppSettings::StrictUtf8 => self.0.remove(UTF8_STRICT),\n- AppSettings::AllowInvalidUtf8 => self.0.remove(UTF8_NONE),\n- AppSettings::AllowLeadingHyphen => self.0.remove(LEADING_HYPHEN),\n- }\n- }\n-\n- pub fn is_set(&self, s: AppSettings) -> bool {\n- match s {\n- AppSettings::SubcommandsNegateReqs => self.0.contains(SC_NEGATE_REQS),\n- AppSettings::VersionlessSubcommands => self.0.contains(VERSIONLESS_SC),\n- AppSettings::SubcommandRequired => self.0.contains(SC_REQUIRED),\n- AppSettings::ArgRequiredElseHelp => self.0.contains(A_REQUIRED_ELSE_HELP),\n- AppSettings::GlobalVersion => self.0.contains(GLOBAL_VERSION),\n- AppSettings::UnifiedHelpMessage => self.0.contains(UNIFIED_HELP),\n- AppSettings::WaitOnError => self.0.contains(WAIT_ON_ERROR),\n- AppSettings::SubcommandRequiredElseHelp => self.0.contains(SC_REQUIRED_ELSE_HELP),\n- AppSettings::NeedsLongHelp => self.0.contains(NEEDS_LONG_HELP),\n- AppSettings::NeedsLongVersion => self.0.contains(NEEDS_LONG_VERSION),\n- AppSettings::NeedsSubcommandHelp => self.0.contains(NEEDS_SC_HELP),\n- AppSettings::DisableVersion => self.0.contains(DISABLE_VERSION),\n- AppSettings::Hidden => self.0.contains(HIDDEN),\n- AppSettings::TrailingVarArg => self.0.contains(TRAILING_VARARG),\n- AppSettings::NoBinaryName => self.0.contains(NO_BIN_NAME),\n- AppSettings::AllowExternalSubcommands => self.0.contains(ALLOW_UNK_SC),\n- AppSettings::StrictUtf8 => self.0.contains(UTF8_STRICT),\n- AppSettings::AllowInvalidUtf8 => self.0.contains(UTF8_NONE),\n- AppSettings::AllowLeadingHyphen => self.0.contains(LEADING_HYPHEN),\n- }\n+ impl_settings! { AppSettings,\n+ SubcommandsNegateReqs => SC_NEGATE_REQS,\n+ VersionlessSubcommands => VERSIONLESS_SC,\n+ SubcommandRequired => SC_REQUIRED,\n+ ArgRequiredElseHelp => A_REQUIRED_ELSE_HELP,\n+ GlobalVersion => GLOBAL_VERSION,\n+ UnifiedHelpMessage => UNIFIED_HELP,\n+ WaitOnError => WAIT_ON_ERROR,\n+ SubcommandRequiredElseHelp => SC_REQUIRED_ELSE_HELP,\n+ NeedsLongHelp => NEEDS_LONG_HELP,\n+ NeedsLongVersion => NEEDS_LONG_VERSION,\n+ NeedsSubcommandHelp => NEEDS_SC_HELP,\n+ DisableVersion => DISABLE_VERSION,\n+ Hidden => HIDDEN,\n+ TrailingVarArg => TRAILING_VARARG,\n+ NoBinaryName => NO_BIN_NAME,\n+ AllowExternalSubcommands => ALLOW_UNK_SC,\n+ StrictUtf8 => UTF8_STRICT,\n+ AllowInvalidUtf8 => UTF8_NONE,\n+ AllowLeadingHyphen => LEADING_HYPHEN\n }\n }\n \ndiff --git a/src/args/settings.rs b/src/args/settings.rs\nindex f2f1384..effc18c 100644\n--- a/src/args/settings.rs\n+++ b/src/args/settings.rs\n@@ -21,40 +21,14 @@ impl ArgFlags {\n ArgFlags(EMPTY_VALS | USE_DELIM)\n }\n \n- pub fn set(&mut self, s: ArgSettings) {\n- match s {\n- ArgSettings::Required => self.0.insert(REQUIRED),\n- ArgSettings::Multiple => self.0.insert(MULTIPLE),\n- ArgSettings::EmptyValues => self.0.insert(EMPTY_VALS),\n- ArgSettings::Global => self.0.insert(GLOBAL),\n- ArgSettings::Hidden => self.0.insert(HIDDEN),\n- ArgSettings::TakesValue => self.0.insert(TAKES_VAL),\n- ArgSettings::UseValueDelimiter => self.0.insert(USE_DELIM),\n- }\n- }\n-\n- pub fn unset(&mut self, s: ArgSettings) {\n- match s {\n- ArgSettings::Required => self.0.remove(REQUIRED),\n- ArgSettings::Multiple => self.0.remove(MULTIPLE),\n- ArgSettings::EmptyValues => self.0.remove(EMPTY_VALS),\n- ArgSettings::Global => self.0.remove(GLOBAL),\n- ArgSettings::Hidden => self.0.remove(HIDDEN),\n- ArgSettings::TakesValue => self.0.remove(TAKES_VAL),\n- ArgSettings::UseValueDelimiter => self.0.remove(USE_DELIM),\n- }\n- }\n-\n- pub fn is_set(&self, s: ArgSettings) -> bool {\n- match s {\n- 
ArgSettings::Required => self.0.contains(REQUIRED),\n- ArgSettings::Multiple => self.0.contains(MULTIPLE),\n- ArgSettings::EmptyValues => self.0.contains(EMPTY_VALS),\n- ArgSettings::Global => self.0.contains(GLOBAL),\n- ArgSettings::Hidden => self.0.contains(HIDDEN),\n- ArgSettings::TakesValue => self.0.contains(TAKES_VAL),\n- ArgSettings::UseValueDelimiter => self.0.contains(USE_DELIM),\n- }\n+ impl_settings!{ArgSettings,\n+ Required => REQUIRED,\n+ Multiple => MULTIPLE,\n+ EmptyValues => EMPTY_VALS,\n+ Global => GLOBAL,\n+ Hidden => HIDDEN,\n+ TakesValue => TAKES_VAL,\n+ UseValueDelimiter => USE_DELIM\n }\n }\n \ndiff --git a/src/macros.rs b/src/macros.rs\nindex 47675ac..29d5382 100644\n--- a/src/macros.rs\n+++ b/src/macros.rs\n@@ -1,3 +1,25 @@\n+macro_rules! impl_settings {\n+ ($n:ident, $($v:ident => $c:ident),+) => {\n+ pub fn set(&mut self, s: $n) {\n+ match s {\n+ $($n::$v => self.0.insert($c)),+\n+ }\n+ }\n+\n+ pub fn unset(&mut self, s: $n) {\n+ match s {\n+ $($n::$v => self.0.remove($c)),+\n+ }\n+ }\n+\n+ pub fn is_set(&self, s: $n) -> bool {\n+ match s {\n+ $($n::$v => self.0.contains($c)),+\n+ }\n+ }\n+ };\n+}\n+\n // Convenience for writing to stderr thanks to https://github.com/BurntSushi\n macro_rules! wlnerr(\n ($($arg:tt)*) => ({\n"]
5
["099861ff5b0f83773ca0af4c70e6e39be3b0336c", "4ab28fc2e63e975a0c77e18ae644f34fa5f8771a", "a75538817c20fc4132718fd7b586bf835a5795e3", "174f25214caa10ec643db6c81aaa0f3511bf78f4", "86f3e3397594f8312226c5a193608a054087805c"]
["docs", "build", "feat", "fix", "refactor"]
Fix readme Signed-off-by: Ben Johnson <[email protected]>,add page blackwhitelist and pdf,better tested publishing flow,exclude github.io from link checking to avoid rate limiting,do not pin time in tests but only skip ahead related to #573
["diff --git a/README.md b/README.md\nindex 587d655..da746bb 100644\n--- a/README.md\n+++ b/README.md\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * [**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - [docker][docs.sources.docker], [file][docs.sources.file], [journald][docs.sources.journald], [kafka][docs.sources.kafka]\n+* [**Transforms**][docs.transforms] - [json_parser][docs.transforms.json_parser], [log_to_metric][docs.transforms.log_to_metric], [lua][docs.transforms.lua], [regex_parser][docs.transforms.regex_parser]\n+* [**Sinks**][docs.sinks] - [aws_cloudwatch_logs][docs.sinks.aws_cloudwatch_logs], [aws_cloudwatch_metrics][docs.sinks.aws_cloudwatch_metrics], [aws_kinesis_streams][docs.sinks.aws_kinesis_streams], [aws_s3][docs.sinks.aws_s3], [clickhouse][docs.sinks.clickhouse], [elasticsearch][docs.sinks.elasticsearch], and [15 more][docs.sinks]\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,88 +110,6 @@ curl --proto '=https' --tlsv1.2 -sSf https://sh.vector.dev | sh\n \n Or view [platform specific installation instructions][docs.installation].\n \n-\n-## Sources\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`docker`**][docs.sources.docker] | Ingests data through the docker engine daemon and outputs [`log`][docs.data-model#log] events. |\n-| [**`file`**][docs.sources.file] | Ingests data through one or more local files and outputs [`log`][docs.data-model#log] events. |\n-| [**`journald`**][docs.sources.journald] | Ingests data through log records from journald and outputs [`log`][docs.data-model#log] events. |\n-| [**`kafka`**][docs.sources.kafka] | Ingests data through Kafka 0.9 or later and outputs [`log`][docs.data-model#log] events. |\n-| [**`statsd`**][docs.sources.statsd] | Ingests data through the StatsD UDP protocol and outputs [`metric`][docs.data-model#metric] events. |\n-| [**`stdin`**][docs.sources.stdin] | Ingests data through standard input (STDIN) and outputs [`log`][docs.data-model#log] events. |\n-| [**`syslog`**][docs.sources.syslog] | Ingests data through the Syslog 5424 protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`tcp`**][docs.sources.tcp] | Ingests data through the TCP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`udp`**][docs.sources.udp] | Ingests data through the UDP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`vector`**][docs.sources.vector] | Ingests data through another upstream [`vector` sink][docs.sinks.vector] and outputs [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events. 
|\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`add_fields`**][docs.transforms.add_fields] | Accepts [`log`][docs.data-model#log] events and allows you to add one or more log fields. |\n-| [**`add_tags`**][docs.transforms.add_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to add one or more metric tags. |\n-| [**`coercer`**][docs.transforms.coercer] | Accepts [`log`][docs.data-model#log] events and allows you to coerce log fields into fixed types. |\n-| [**`field_filter`**][docs.transforms.field_filter] | Accepts [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events and allows you to filter events by a log field's value. |\n-| [**`grok_parser`**][docs.transforms.grok_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value with [Grok][urls.grok]. |\n-| [**`json_parser`**][docs.transforms.json_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value as JSON. |\n-| [**`log_to_metric`**][docs.transforms.log_to_metric] | Accepts [`log`][docs.data-model#log] events and allows you to convert logs into one or more metrics. |\n-| [**`lua`**][docs.transforms.lua] | Accepts [`log`][docs.data-model#log] events and allows you to transform events with a full embedded [Lua][urls.lua] engine. |\n-| [**`regex_parser`**][docs.transforms.regex_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field's value with a [Regular Expression][urls.regex]. |\n-| [**`remove_fields`**][docs.transforms.remove_fields] | Accepts [`log`][docs.data-model#log] events and allows you to remove one or more log fields. |\n-| [**`remove_tags`**][docs.transforms.remove_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to remove one or more metric tags. |\n-| [**`sampler`**][docs.transforms.sampler] | Accepts [`log`][docs.data-model#log] events and allows you to sample events with a configurable rate. |\n-| [**`split`**][docs.transforms.split] | Accepts [`log`][docs.data-model#log] events and allows you to split a field's value on a given separator and zip the tokens into ordered field names. |\n-| [**`tokenizer`**][docs.transforms.tokenizer] | Accepts [`log`][docs.data-model#log] events and allows you to tokenize a field's value by splitting on white space, ignoring special wrapping characters, and zip the tokens into ordered field names. |\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`aws_cloudwatch_logs`**][docs.sinks.aws_cloudwatch_logs] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS CloudWatch Logs][urls.aws_cw_logs] via the [`PutLogEvents` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html). |\n-| [**`aws_cloudwatch_metrics`**][docs.sinks.aws_cloudwatch_metrics] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [AWS CloudWatch Metrics][urls.aws_cw_metrics] via the [`PutMetricData` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_PutMetricData.html). 
|\n-| [**`aws_kinesis_streams`**][docs.sinks.aws_kinesis_streams] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS Kinesis Data Stream][urls.aws_kinesis_data_streams] via the [`PutRecords` API endpoint](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html). |\n-| [**`aws_s3`**][docs.sinks.aws_s3] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS S3][urls.aws_s3] via the [`PutObject` API endpoint](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html). |\n-| [**`blackhole`**][docs.sinks.blackhole] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to a blackhole that simply discards data, designed for testing and benchmarking purposes. |\n-| [**`clickhouse`**][docs.sinks.clickhouse] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Clickhouse][urls.clickhouse] via the [`HTTP` Interface][urls.clickhouse_http]. |\n-| [**`console`**][docs.sinks.console] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to [standard output streams][urls.standard_streams], such as `STDOUT` and `STDERR`. |\n-| [**`datadog_metrics`**][docs.sinks.datadog_metrics] | [Batches](#buffers-and-batches) [`metric`][docs.data-model#metric] events to [Datadog][urls.datadog] metrics service using [HTTP API](https://docs.datadoghq.com/api/?lang=bash#metrics). |\n-| [**`elasticsearch`**][docs.sinks.elasticsearch] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Elasticsearch][urls.elasticsearch] via the [`_bulk` API endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html). |\n-| [**`file`**][docs.sinks.file] | [Streams](#streaming) [`log`][docs.data-model#log] events to a file. |\n-| [**`http`**][docs.sinks.http] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a generic HTTP endpoint. |\n-| [**`kafka`**][docs.sinks.kafka] | [Streams](#streaming) [`log`][docs.data-model#log] events to [Apache Kafka][urls.kafka] via the [Kafka protocol][urls.kafka_protocol]. |\n-| [**`prometheus`**][docs.sinks.prometheus] | [Exposes](#exposing-and-scraping) [`metric`][docs.data-model#metric] events to [Prometheus][urls.prometheus] metrics service. |\n-| [**`splunk_hec`**][docs.sinks.splunk_hec] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a [Splunk HTTP Event Collector][urls.splunk_hec]. |\n-| [**`statsd`**][docs.sinks.statsd] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [StatsD][urls.statsd] metrics service. |\n-| [**`tcp`**][docs.sinks.tcp] | [Streams](#streaming) [`log`][docs.data-model#log] events to a TCP connection. |\n-| [**`vector`**][docs.sinks.vector] | [Streams](#streaming) [`log`][docs.data-model#log] events to another downstream [`vector` source][docs.sources.vector]. |\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright 2019, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\n@@ -200,8 +123,6 @@ the License.\n [docs.configuration]: https://vector.dev/docs/setup/configuration\n [docs.containers]: https://vector.dev/docs/setup/installation/containers\n [docs.correctness]: https://vector.dev/docs/about/correctness\n-[docs.data-model#log]: https://vector.dev/docs/about/data-model#log\n-[docs.data-model#metric]: https://vector.dev/docs/about/data-model#metric\n [docs.data-model.log]: https://vector.dev/docs/about/data-model/log\n [docs.data-model.metric]: https://vector.dev/docs/about/data-model/metric\n [docs.data_model]: https://vector.dev/docs/about/data-model\n@@ -209,6 +130,7 @@ the License.\n [docs.from-archives]: https://vector.dev/docs/setup/installation/manual/from-archives\n [docs.from-source]: https://vector.dev/docs/setup/installation/manual/from-source\n [docs.guarantees]: https://vector.dev/docs/about/guarantees\n+[docs.guides.getting_started]: https://vector.dev/docs/setup/guides/getting-started\n [docs.guides]: https://vector.dev/docs/setup/guides\n [docs.installation]: https://vector.dev/docs/setup/installation\n [docs.monitoring]: https://vector.dev/docs/administration/monitoring\n@@ -224,72 +146,25 @@ the License.\n [docs.sinks.aws_cloudwatch_metrics]: https://vector.dev/docs/reference/sinks/aws_cloudwatch_metrics\n [docs.sinks.aws_kinesis_streams]: https://vector.dev/docs/reference/sinks/aws_kinesis_streams\n [docs.sinks.aws_s3]: https://vector.dev/docs/reference/sinks/aws_s3\n-[docs.sinks.blackhole]: https://vector.dev/docs/reference/sinks/blackhole\n [docs.sinks.clickhouse]: https://vector.dev/docs/reference/sinks/clickhouse\n-[docs.sinks.console]: https://vector.dev/docs/reference/sinks/console\n-[docs.sinks.datadog_metrics]: https://vector.dev/docs/reference/sinks/datadog_metrics\n [docs.sinks.elasticsearch]: https://vector.dev/docs/reference/sinks/elasticsearch\n-[docs.sinks.file]: https://vector.dev/docs/reference/sinks/file\n-[docs.sinks.http]: https://vector.dev/docs/reference/sinks/http\n-[docs.sinks.kafka]: https://vector.dev/docs/reference/sinks/kafka\n-[docs.sinks.prometheus]: https://vector.dev/docs/reference/sinks/prometheus\n-[docs.sinks.splunk_hec]: https://vector.dev/docs/reference/sinks/splunk_hec\n-[docs.sinks.statsd]: https://vector.dev/docs/reference/sinks/statsd\n-[docs.sinks.tcp]: https://vector.dev/docs/reference/sinks/tcp\n-[docs.sinks.vector]: https://vector.dev/docs/reference/sinks/vector\n [docs.sinks]: https://vector.dev/docs/reference/sinks\n [docs.sources.docker]: https://vector.dev/docs/reference/sources/docker\n [docs.sources.file]: https://vector.dev/docs/reference/sources/file\n [docs.sources.journald]: https://vector.dev/docs/reference/sources/journald\n [docs.sources.kafka]: https://vector.dev/docs/reference/sources/kafka\n-[docs.sources.statsd]: https://vector.dev/docs/reference/sources/statsd\n-[docs.sources.stdin]: https://vector.dev/docs/reference/sources/stdin\n-[docs.sources.syslog]: https://vector.dev/docs/reference/sources/syslog\n-[docs.sources.tcp]: https://vector.dev/docs/reference/sources/tcp\n-[docs.sources.udp]: https://vector.dev/docs/reference/sources/udp\n-[docs.sources.vector]: https://vector.dev/docs/reference/sources/vector\n [docs.sources]: https://vector.dev/docs/reference/sources\n [docs.topologies]: https://vector.dev/docs/setup/deployment/topologies\n-[docs.transforms.add_fields]: 
https://vector.dev/docs/reference/transforms/add_fields\n-[docs.transforms.add_tags]: https://vector.dev/docs/reference/transforms/add_tags\n-[docs.transforms.coercer]: https://vector.dev/docs/reference/transforms/coercer\n-[docs.transforms.field_filter]: https://vector.dev/docs/reference/transforms/field_filter\n-[docs.transforms.grok_parser]: https://vector.dev/docs/reference/transforms/grok_parser\n [docs.transforms.json_parser]: https://vector.dev/docs/reference/transforms/json_parser\n [docs.transforms.log_to_metric]: https://vector.dev/docs/reference/transforms/log_to_metric\n [docs.transforms.lua]: https://vector.dev/docs/reference/transforms/lua\n [docs.transforms.regex_parser]: https://vector.dev/docs/reference/transforms/regex_parser\n-[docs.transforms.remove_fields]: https://vector.dev/docs/reference/transforms/remove_fields\n-[docs.transforms.remove_tags]: https://vector.dev/docs/reference/transforms/remove_tags\n-[docs.transforms.sampler]: https://vector.dev/docs/reference/transforms/sampler\n-[docs.transforms.split]: https://vector.dev/docs/reference/transforms/split\n-[docs.transforms.tokenizer]: https://vector.dev/docs/reference/transforms/tokenizer\n [docs.transforms]: https://vector.dev/docs/reference/transforms\n [docs.updating]: https://vector.dev/docs/administration/updating\n [docs.use_cases]: https://vector.dev/docs/use_cases\n [docs.validating]: https://vector.dev/docs/administration/validating\n-[urls.aws_cw_logs]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/WhatIsCloudWatchLogs.html\n-[urls.aws_cw_metrics]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/working_with_metrics.html\n-[urls.aws_kinesis_data_streams]: https://aws.amazon.com/kinesis/data-streams/\n-[urls.aws_s3]: https://aws.amazon.com/s3/\n-[urls.clickhouse]: https://clickhouse.yandex/\n-[urls.clickhouse_http]: https://clickhouse.yandex/docs/en/interfaces/http/\n-[urls.datadog]: https://www.datadoghq.com\n-[urls.elasticsearch]: https://www.elastic.co/products/elasticsearch\n-[urls.grok]: http://grokdebug.herokuapp.com/\n-[urls.kafka]: https://kafka.apache.org/\n-[urls.kafka_protocol]: https://kafka.apache.org/protocol\n-[urls.lua]: https://www.lua.org/\n [urls.mailing_list]: https://vector.dev/mailing_list/\n-[urls.new_sink]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_source]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_transform]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.prometheus]: https://prometheus.io/\n-[urls.regex]: https://en.wikipedia.org/wiki/Regular_expression\n [urls.rust]: https://www.rust-lang.org/\n-[urls.splunk_hec]: http://dev.splunk.com/view/event-collector/SP-CAAAE6M\n-[urls.standard_streams]: https://en.wikipedia.org/wiki/Standard_streams\n-[urls.statsd]: https://github.com/statsd/statsd\n [urls.test_harness]: https://github.com/timberio/vector-test-harness/\n [urls.v0.5.0]: https://github.com/timberio/vector/releases/tag/v0.5.0\n [urls.vector_changelog]: https://github.com/timberio/vector/blob/master/CHANGELOG.md\ndiff --git a/README.md.erb b/README.md.erb\nindex 3b14aa0..cc241eb 100644\n--- a/README.md.erb\n+++ b/README.md.erb\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * 
[**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - <%= common_component_links(:source) %>\n+* [**Transforms**][docs.transforms] - <%= common_component_links(:transform) %>\n+* [**Sinks**][docs.sinks] - <%= common_component_links(:sink) %>\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,44 +110,6 @@ Run the following in your terminal, then follow the on-screen instructions.\n \n Or view [platform specific installation instructions][docs.installation].\n \n-\n-## Sources\n-\n-<%= components_table(metadata.sources.to_h.values.sort) %>\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-<%= components_table(metadata.transforms.to_h.values.sort) %>\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-<%= components_table(metadata.sinks.to_h.values.sort) %>\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright <%= Time.now.year %>, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\ndiff --git a/scripts/generate/templates.rb b/scripts/generate/templates.rb\nindex e5e7ce7..c793ae0 100644\n--- a/scripts/generate/templates.rb\n+++ b/scripts/generate/templates.rb\n@@ -89,6 +89,23 @@ class Templates\n render(\"#{partials_path}/_commit_type_toc_item.md\", binding).gsub(/,$/, \"\")\n end\n \n+ def common_component_links(type, limit = 5)\n+ common = metadata.send(\"#{type.to_s.pluralize}_list\").select(&:common?)\n+\n+ links =\n+ common[0..limit].collect do |component|\n+ \"[#{component.name}][docs.#{type.to_s.pluralize}.#{component.name}]\"\n+ end\n+\n+ num_leftover = common.size - links.size\n+\n+ if num_leftover > 0\n+ links << \"and [15 more][docs.#{type.to_s.pluralize}]\"\n+ end\n+\n+ links.join(\", \")\n+ end\n+\n def component_config_example(component)\n render(\"#{partials_path}/_component_config_example.md\", binding).strip\n end\ndiff --git a/scripts/util/metadata/component.rb b/scripts/util/metadata/component.rb\nindex 0873b2e..4dc5650 100644\n--- a/scripts/util/metadata/component.rb\n+++ b/scripts/util/metadata/component.rb\n@@ -9,6 +9,7 @@ class Component\n include Comparable\n \n attr_reader :beta,\n+ :common,\n :function_category,\n :id,\n :name,\n@@ -18,6 +19,7 @@ class Component\n \n def initialize(hash)\n @beta = hash[\"beta\"] == true\n+ @common = hash[\"common\"] == true\n @function_category = hash.fetch(\"function_category\")\n @name = hash.fetch(\"name\")\n @type ||= self.class.name.downcase\n@@ -71,6 +73,10 @@ class Component\n beta == true\n end\n \n+ def common?\n+ common == true\n+ end\n+\n def context_options\n options_list.select(&:context?)\n end\ndiff --git a/website/src/components/VectorComponents/index.js b/website/src/components/VectorComponents/index.js\nindex b6c5c13..d3c9adf 100644\n--- a/website/src/components/VectorComponents/index.js\n+++ b/website/src/components/VectorComponents/index.js\n@@ -154,7 +154,7 @@ function VectorComponents(props) {\n //\n \n const [onlyAtLeastOnce, setOnlyAtLeastOnce] = useState(queryObj['at-least-once'] == 'true');\n- const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['providers']));\n+ const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['functions']));\n const [onlyLog, setOnlyLog] = useState(queryObj['log'] == 'true');\n const [onlyMetric, setOnlyMetric] = useState(queryObj['metric'] == 'true');\n const [onlyProductionReady, setOnlyProductionReady] = useState(queryObj['prod-ready'] == 'true');\n", "diff --git a/src/_locales/common/messages.json b/src/_locales/common/messages.json\nindex e8524ac..3a596d6 100644\n--- a/src/_locales/common/messages.json\n+++ b/src/_locales/common/messages.json\n@@ -4,11 +4,21 @@\n \"zh_CN\": \"\u6dfb\u52a0\",\n \"zh_TW\": \"\u65b0\u589e\"\n },\n+ \"blacklist\": {\n+ \"en\": \"Blacklist\",\n+ \"zh_CN\": \"\u9ed1\u540d\u5355\",\n+ \"zh_TW\": \"\u9ed1\u540d\u55ae\"\n+ },\n \"cancel\": {\n \"en\": \"Cancel\",\n \"zh_CN\": \"\u53d6\u6d88\",\n \"zh_TW\": \"\u53d6\u6d88\"\n },\n+ \"changes_confirm\": {\n+ \"en\": \"Changes not saved. 
Close anyway?\",\n+ \"zh_CN\": \"\u4fee\u6539\u672a\u4fdd\u5b58\u3002\u786e\u8ba4\u5173\u95ed\uff1f\",\n+ \"zh_TW\": \"\u4fee\u6539\u672a\u4fdd\u5b58\u3002\u78ba\u5b9a\u95dc\u9589\uff1f\"\n+ },\n \"confirm\": {\n \"en\": \"Confirm\",\n \"zh_CN\": \"\u786e\u8ba4\",\n@@ -93,5 +103,10 @@\n \"en\": \"words\",\n \"zh_CN\": \"\u4e2a\",\n \"zh_TW\": \"\u4e2a\"\n+ },\n+ \"whitelist\": {\n+ \"en\": \"Whitelist\",\n+ \"zh_CN\": \"\u767d\u540d\u5355\",\n+ \"zh_TW\": \"\u767d\u540d\u55ae\"\n }\n }\ndiff --git a/src/_locales/options/messages.json b/src/_locales/options/messages.json\nindex ada2488..e7d699a 100644\n--- a/src/_locales/options/messages.json\n+++ b/src/_locales/options/messages.json\n@@ -119,6 +119,11 @@\n \"zh_CN\": \"\u53cd\u9988\u95ee\u9898\",\n \"zh_TW\": \"\u8edf\u9ad4\u4f7f\u7528\u7591\u554f\u548c\u5efa\u8a00\"\n },\n+ \"match_pattern_description\": {\n+ \"en\": \"Specify URLs as match patterns. <a href=\\\"https://developer.mozilla.org/en-US/Add-ons/WebExtensions/Match_patterns#Examples\\\" target=\\\"_blank\\\">Examples</a>. Empty fields will be removed.\",\n+ \"zh_CN\": \"\u7f51\u5740\u652f\u6301\u5339\u914d\u6a21\u5f0f\uff08<a href=\\\"https://developer.mozilla.org/zh-CN/Add-ons/WebExtensions/Match_patterns#\u8303\u4f8b\\\" target=\\\"_blank\\\">\u4f8b\u5b50</a>\uff09\u3002\u7559\u7a7a\u4fdd\u5b58\u5373\u53ef\u6e05\u9664\u3002\",\n+ \"zh_TW\": \"\u7db2\u5740\u652f\u63f4\u5339\u914d\u6a21\u5f0f\uff08<a href=\\\"https://developer.mozilla.org/zh-CN/Add-ons/WebExtensions/Match_patterns#\u8303\u4f8b\\\" target=\\\"_blank\\\">\u4f8b\u5b50</a>\uff09\u3002\u7559\u7a7a\u5132\u5b58\u5373\u53ef\u6e05\u9664\u3002\"\n+ },\n \"msg_updated\": {\n \"en\": \"Successfully updated\",\n \"zh_CN\": \"\u8bbe\u7f6e\u5df2\u66f4\u65b0\",\n@@ -319,6 +324,21 @@\n \"zh_CN\": \"\u5f00\u542f\u540e\uff0c\u672c\u6269\u5c55\u4f1a\u81ea\u52a8\u8bc6\u522b\u8f93\u5165\u6846\u4ee5\u53ca\u5e38\u89c1\u7f16\u8f91\u5668\uff0c\u5982 CodeMirror\u3001ACE \u548c Monaco\u3002\",\n \"zh_TW\": \"\u958b\u555f\u540e\uff0c\u672c\u7a0b\u5f0f\u6703\u81ea\u52d5\u8b58\u5225\u8f38\u5165\u6846\u4ee5\u53ca\u5e38\u898b\u7de8\u8f2f\u5668\uff0c\u5982 CodeMirror\u3001ACE \u548c Monaco\u3002\"\n },\n+ \"opt_pdf_blackwhitelist_help\": {\n+ \"en\": \"Blacklisted PDF links will not jump to Saladict PDF Viewer.\",\n+ \"zh_CN\": \"\u9ed1\u540d\u5355\u5339\u914d\u7684 PDF \u94fe\u63a5\u5c06\u4e0d\u4f1a\u8df3\u8f6c\u5230 Saladict \u6253\u5f00\u3002\",\n+ \"zh_TW\": \"\u9ed1\u540d\u55ae\u5339\u914d\u7684 PDF \u9023\u7d50\u5c07\u4e0d\u6703\u8df3\u8f49\u5230 Saladict \u958b\u555f\u3002\"\n+ },\n+ \"opt_pdf_sniff\": {\n+ \"en\": \"Enable PDF Sniffer\",\n+ \"zh_CN\": \"\u9ed8\u8ba4\u7528\u672c\u6269\u5c55\u6d4f\u89c8 PDF\",\n+ \"zh_TW\": \"\u4f7f\u7528\u672c\u61c9\u7528\u7a0b\u5f0f\u700f\u89bd PDF\"\n+ },\n+ \"opt_pdf_sniff_help\": {\n+ \"en\": \"If turned on\uff0c PDF links will be automatically captured.\",\n+ \"zh_CN\": \"\u5f00\u542f\u540e\u6240\u6709 PDF \u94fe\u63a5\u5c06\u81ea\u52a8\u8df3\u8f6c\u5230\u672c\u6269\u5c55\u6253\u5f00\uff08\u5305\u62ec\u672c\u5730\uff0c\u5982\u679c\u5728\u6269\u5c55\u7ba1\u7406\u9875\u9762\u52fe\u9009\u4e86\u5141\u8bb8\uff09\u3002\",\n+ \"zh_TW\": \"\u958b\u555f\u5f8c\u6240\u6709 PDF \u9023\u7d50\u5c07\u81ea\u52d5\u8df3\u8f49\u5230\u672c\u64f4\u5145\u5957\u4ef6\u958b\u555f\uff08\u5305\u62ec\u672c\u5730\uff0c\u5982\u679c\u5728\u64f4\u5145\u5957\u4ef6\u7ba1\u7406\u9801\u9762\u52fe\u9078\u4e86\u5141\u8a31\uff09\u3002\"\n+ },\n \"opt_profile_change\": {\n \"en\": \"This option may change base on \\\"Profile\\\".\",\n 
\"zh_CN\": \"\u6b64\u9009\u9879\u4f1a\u56e0\u300c\u60c5\u666f\u6a21\u5f0f\u300d\u800c\u6539\u53d8\u3002\",\n@@ -329,6 +349,16 @@\n \"zh_CN\": \"\u8f93\u5165\u65f6\u663e\u793a\u5019\u9009\",\n \"zh_TW\": \"\u8f38\u5165\u6642\u986f\u793a\u5019\u9078\"\n },\n+ \"opt_sel_blackwhitelist\": {\n+ \"en\": \"Selection Black/White List\",\n+ \"zh_CN\": \"\u5212\u8bcd\u9ed1\u767d\u540d\u5355\",\n+ \"zh_TW\": \"\u9078\u8a5e\u9ed1\u767d\u540d\u55ae\"\n+ },\n+ \"opt_sel_blackwhitelist_help\": {\n+ \"en\": \"Saladict will not react to selection in blacklisted pages.\",\n+ \"zh_CN\": \"\u9ed1\u540d\u5355\u5339\u914d\u7684\u9875\u9762 Saladict \u5c06\u4e0d\u4f1a\u54cd\u5e94\u9f20\u6807\u5212\u8bcd\u3002\",\n+ \"zh_TW\": \"\u9ed1\u540d\u55ae\u5339\u914d\u7684\u9801\u9762 Saladict \u5c07\u4e0d\u6703\u97ff\u61c9\u6ed1\u9f20\u5283\u8a5e\u3002\"\n+ },\n \"opt_sel_lang\": {\n \"en\": \"Selection Languages\",\n \"zh_CN\": \"\u5212\u8bcd\u8bed\u8a00\",\ndiff --git a/src/options/components/options/BlackWhiteList/index.tsx b/src/options/components/options/BlackWhiteList/index.tsx\nnew file mode 100644\nindex 0000000..52708dd\n--- /dev/null\n+++ b/src/options/components/options/BlackWhiteList/index.tsx\n@@ -0,0 +1,69 @@\n+import React from 'react'\n+import { Props } from '../typings'\n+import { formItemLayout } from '../helpers'\n+import MatchPatternModal from '../../MatchPatternModal'\n+\n+import { FormComponentProps } from 'antd/lib/form'\n+import { Form, Button } from 'antd'\n+\n+export type BlackWhiteListProps = Props & FormComponentProps\n+\n+interface BlackWhiteListState {\n+ editingArea: '' | 'pdfWhitelist' | 'pdfBlacklist' | 'whitelist' | 'blacklist'\n+}\n+\n+export class BlackWhiteList extends React.Component<BlackWhiteListProps, BlackWhiteListState> {\n+ constructor (props: BlackWhiteListProps) {\n+ super(props)\n+ this.state = {\n+ editingArea: ''\n+ }\n+ }\n+\n+ closeModal = () => {\n+ this.setState({ editingArea: '' })\n+ }\n+\n+ render () {\n+ const { t, config } = this.props\n+\n+ return (\n+ <Form>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={t('opt_sel_blackwhitelist')}\n+ help={t('opt_sel_blackwhitelist_help')}\n+ >\n+ <Button\n+ style={{ marginRight: 10 }}\n+ onClick={() => this.setState({ editingArea: 'blacklist' })}\n+ >{t('common:blacklist')}</Button>\n+ <Button\n+ onClick={() => this.setState({ editingArea: 'whitelist' })}\n+ >{t('common:whitelist')}</Button>\n+ </Form.Item>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={`PDF ${t('nav_BlackWhiteList')}`}\n+ help={t('opt_pdf_blackwhitelist_help')}\n+ >\n+ <Button\n+ style={{ marginRight: 10 }}\n+ onClick={() => this.setState({ editingArea: 'pdfBlacklist' })}\n+ >PDF {t('common:blacklist')}</Button>\n+ <Button\n+ onClick={() => this.setState({ editingArea: 'pdfWhitelist' })}\n+ >PDF {t('common:whitelist')}</Button>\n+ </Form.Item>\n+ <MatchPatternModal\n+ t={t}\n+ config={config}\n+ area={this.state.editingArea}\n+ onClose={this.closeModal}\n+ />\n+ </Form>\n+ )\n+ }\n+}\n+\n+export default BlackWhiteList\ndiff --git a/src/options/components/options/PDF/index.tsx b/src/options/components/options/PDF/index.tsx\nnew file mode 100644\nindex 0000000..3e7772d\n--- /dev/null\n+++ b/src/options/components/options/PDF/index.tsx\n@@ -0,0 +1,72 @@\n+import React from 'react'\n+import { Props } from '../typings'\n+import { updateConfigOrProfile, formItemLayout } from '../helpers'\n+import MatchPatternModal from '../../MatchPatternModal'\n+\n+import { FormComponentProps } from 'antd/lib/form'\n+import { Form, Switch, Button } from 'antd'\n+\n+export type 
PDFProps = Props & FormComponentProps\n+\n+interface PDFState {\n+ editingArea: '' | 'pdfWhitelist' | 'pdfBlacklist'\n+}\n+\n+export class PDF extends React.Component<PDFProps, PDFState> {\n+ constructor (props: PDFProps) {\n+ super(props)\n+\n+ this.state = {\n+ editingArea: ''\n+ }\n+ }\n+\n+ closeModal = () => {\n+ this.setState({ editingArea: '' })\n+ }\n+\n+ render () {\n+ const { t, config } = this.props\n+ const { getFieldDecorator } = this.props.form\n+\n+ return (\n+ <Form>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={t('opt_pdf_sniff')}\n+ help={t('opt_pdf_sniff_help')}\n+ >{\n+ getFieldDecorator('config#pdfSniff', {\n+ initialValue: config.pdfSniff,\n+ valuePropName: 'checked',\n+ })(\n+ <Switch />\n+ )\n+ }</Form.Item>\n+ <Form.Item\n+ {...formItemLayout}\n+ label={t('nav_BlackWhiteList')}\n+ help={t('opt_pdf_blackwhitelist_help')}\n+ >\n+ <Button\n+ style={{ marginRight: 10 }}\n+ onClick={() => this.setState({ editingArea: 'pdfBlacklist' })}\n+ >PDF {t('common:blacklist')}</Button>\n+ <Button\n+ onClick={() => this.setState({ editingArea: 'pdfWhitelist' })}\n+ >PDF {t('common:whitelist')}</Button>\n+ </Form.Item>\n+ <MatchPatternModal\n+ t={t}\n+ config={config}\n+ area={this.state.editingArea}\n+ onClose={this.closeModal}\n+ />\n+ </Form>\n+ )\n+ }\n+}\n+\n+export default Form.create<PDFProps>({\n+ onValuesChange: updateConfigOrProfile\n+})(PDF)\n", "diff --git a/Makefile.toml b/Makefile.toml\nindex e7d2b20..490d6e2 100644\n--- a/Makefile.toml\n+++ b/Makefile.toml\n@@ -82,7 +82,7 @@ end\n '''\n \n [tasks.build-plugins-release]\n-env = { \"CARGO_MAKE_WORKSPACE_SKIP_MEMBERS\" = [\".\"] }\n+env = { \"CARGO_MAKE_WORKSPACE_INCLUDE_MEMBERS\" = [\"default-plugins/status-bar\", \"default-plugins/strider\", \"default-plugins/tab-bar\"] }\n run_task = { name = \"build-release\", fork = true }\n \n [tasks.wasm-opt-plugins]\n@@ -129,15 +129,16 @@ args = [\"install\", \"cross\"]\n [tasks.publish]\n clear = true\n workspace = false\n-dependencies = [\"build-plugins-release\", \"wasm-opt-plugins\", \"release-commit\", \"build-release\", \"publish-zellij-tile\", \"publish-zellij-tile-utils\", \"publish-zellij-utils\", \"publish-zellij-client\", \"publish-zellij-server\"]\n+dependencies = [\"build-plugins-release\", \"wasm-opt-plugins\", \"release-commit\"]\n run_task = \"publish-zellij\"\n \n [tasks.release-commit]\n dependencies = [\"commit-all\", \"tag-release\"]\n command = \"git\"\n-args = [\"push\", \"--atomic\", \"upstream\", \"main\", \"v${CARGO_MAKE_CRATE_VERSION}\"]\n+args = [\"push\", \"--atomic\", \"origin\", \"main\", \"v${CARGO_MAKE_CRATE_VERSION}\"]\n \n [tasks.commit-all]\n+ignore_errors = true\n command = \"git\"\n args = [\"commit\", \"-aem\", \"chore(release): v${CARGO_MAKE_CRATE_VERSION}\"]\n \n@@ -148,31 +149,32 @@ args = [\"tag\", \"v${CARGO_MAKE_CRATE_VERSION}\"]\n [tasks.publish-zellij-tile]\n ignore_errors = true\n cwd = \"zellij-tile\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-client]\n+ignore_errors = true\n dependencies = [\"publish-zellij-utils\"]\n cwd = \"zellij-client\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-server]\n+ignore_errors = true\n dependencies = [\"publish-zellij-utils\"]\n cwd = \"zellij-server\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-utils]\n+ignore_errors = true\n dependencies = [\"publish-zellij-tile\"]\n cwd = \"zellij-utils\"\n-command = 
\"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-tile-utils]\n ignore_errors = true\n cwd = \"zellij-tile-utils\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij]\n dependencies = [\"publish-zellij-client\", \"publish-zellij-server\", \"publish-zellij-utils\"]\n command = \"cargo\"\n args = [\"publish\"]\n-\n-\n", "diff --git a/.github/workflows/ibis-docs-lint.yml b/.github/workflows/ibis-docs-lint.yml\nindex 90c5a27..db6457b 100644\n--- a/.github/workflows/ibis-docs-lint.yml\n+++ b/.github/workflows/ibis-docs-lint.yml\n@@ -101,6 +101,7 @@ jobs:\n --exclude-mail \\\n --exclude fonts.gstatic.com \\\n --exclude github.com \\\n+ --exclude github.io \\\n --no-progress \\\n --github-token ${{ steps.generate_token.outputs.token }}\n \n", "diff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\nindex 636cd21..76afff7 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\n@@ -15,7 +15,9 @@\n */\n package io.zeebe.broker.it.startup;\n \n-import static io.zeebe.broker.it.util.TopicEventRecorder.*;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.incidentEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.taskEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.wfInstanceEvent;\n import static io.zeebe.test.util.TestUtil.doRepeatedly;\n import static io.zeebe.test.util.TestUtil.waitUntil;\n import static org.assertj.core.api.Assertions.assertThat;\n@@ -24,11 +26,18 @@ import java.io.File;\n import java.io.InputStream;\n import java.nio.charset.StandardCharsets;\n import java.time.Duration;\n-import java.time.Instant;\n import java.util.Collections;\n import java.util.List;\n import java.util.regex.Pattern;\n \n+import org.assertj.core.util.Files;\n+import org.junit.After;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.ExpectedException;\n+import org.junit.rules.RuleChain;\n+import org.junit.rules.TemporaryFolder;\n+\n import io.zeebe.broker.clustering.ClusterServiceNames;\n import io.zeebe.broker.it.ClientRule;\n import io.zeebe.broker.it.EmbeddedBrokerRule;\n@@ -38,7 +47,9 @@ import io.zeebe.client.ZeebeClient;\n import io.zeebe.client.clustering.impl.TopicLeader;\n import io.zeebe.client.clustering.impl.TopologyResponse;\n import io.zeebe.client.cmd.ClientCommandRejectedException;\n-import io.zeebe.client.event.*;\n+import io.zeebe.client.event.DeploymentEvent;\n+import io.zeebe.client.event.TaskEvent;\n+import io.zeebe.client.event.WorkflowInstanceEvent;\n import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.instance.WorkflowDefinition;\n import io.zeebe.raft.Raft;\n@@ -48,9 +59,6 @@ import io.zeebe.test.util.TestFileUtil;\n import io.zeebe.test.util.TestUtil;\n import io.zeebe.transport.SocketAddress;\n import io.zeebe.util.time.ClockUtil;\n-import org.assertj.core.util.Files;\n-import org.junit.*;\n-import org.junit.rules.*;\n \n public class BrokerRecoveryTest\n {\n@@ -360,17 +368,12 @@ public class BrokerRecoveryTest\n waitUntil(() -> !recordingTaskHandler.getHandledTasks().isEmpty());\n \n // when\n- restartBroker(() ->\n- {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n- });\n+ 
restartBroker(() -> ClockUtil.addTime(Duration.ofSeconds(60)));\n \n // wait until stream processor and scheduler process the lock task event which is not re-processed on recovery\n doRepeatedly(() ->\n {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n+ ClockUtil.addTime(Duration.ofSeconds(60)); // retriggers lock expiration check in broker\n return null;\n }).until(t -> eventRecorder.hasTaskEvent(taskEvent(\"LOCK_EXPIRED\")));\n \ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\nindex 5ff1301..0ffe98d 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\n@@ -15,7 +15,9 @@\n */\n package io.zeebe.broker.it.startup;\n \n-import static io.zeebe.broker.it.util.TopicEventRecorder.*;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.incidentEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.taskEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.wfInstanceEvent;\n import static io.zeebe.test.util.TestUtil.waitUntil;\n import static org.assertj.core.api.Assertions.assertThat;\n \n@@ -23,11 +25,18 @@ import java.io.File;\n import java.io.InputStream;\n import java.nio.charset.StandardCharsets;\n import java.time.Duration;\n-import java.time.Instant;\n import java.util.Collections;\n import java.util.List;\n import java.util.regex.Pattern;\n \n+import org.junit.After;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.experimental.categories.Category;\n+import org.junit.rules.ExpectedException;\n+import org.junit.rules.RuleChain;\n+import org.junit.rules.TemporaryFolder;\n+\n import io.zeebe.broker.clustering.ClusterServiceNames;\n import io.zeebe.broker.it.ClientRule;\n import io.zeebe.broker.it.EmbeddedBrokerRule;\n@@ -37,7 +46,9 @@ import io.zeebe.client.ZeebeClient;\n import io.zeebe.client.clustering.impl.TopicLeader;\n import io.zeebe.client.clustering.impl.TopologyResponse;\n import io.zeebe.client.cmd.ClientCommandRejectedException;\n-import io.zeebe.client.event.*;\n+import io.zeebe.client.event.DeploymentEvent;\n+import io.zeebe.client.event.TaskEvent;\n+import io.zeebe.client.event.WorkflowInstanceEvent;\n import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.instance.WorkflowDefinition;\n import io.zeebe.raft.Raft;\n@@ -47,9 +58,6 @@ import io.zeebe.test.util.TestFileUtil;\n import io.zeebe.test.util.TestUtil;\n import io.zeebe.transport.SocketAddress;\n import io.zeebe.util.time.ClockUtil;\n-import org.junit.*;\n-import org.junit.experimental.categories.Category;\n-import org.junit.rules.*;\n \n public class BrokerRestartTest\n {\n@@ -360,11 +368,7 @@ public class BrokerRestartTest\n waitUntil(() -> !recordingTaskHandler.getHandledTasks().isEmpty());\n \n // when\n- restartBroker(() ->\n- {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n- });\n+ restartBroker(() -> ClockUtil.addTime(Duration.ofSeconds(60)));\n \n waitUntil(() -> eventRecorder.hasTaskEvent(taskEvent(\"LOCK_EXPIRED\")));\n recordingTaskHandler.clear();\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\nindex 49b527d..a322fbe 100644\n--- 
a/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\n@@ -353,7 +353,7 @@ public class TaskSubscriptionTest\n waitUntil(() -> taskHandler.getHandledTasks().size() == 1);\n \n // when\n- ClockUtil.setCurrentTime(Instant.now().plus(Duration.ofMinutes(5)));\n+ ClockUtil.addTime(Duration.ofMinutes(5));\n \n // then\n waitUntil(() -> taskHandler.getHandledTasks().size() == 2);\n"]
5
["662c5d1346ea2b01c0bc3c11c648cbdf92035fe2", "9b1c0fc20b614513384a1e562317dbf076eb8ef0", "65574eea5da54bf4722ecb551b42f8ff6088f33b", "ce0539a32b927a3559feebf8f5307e3863e992a1", "7ece3a9a16780dc6c633bbd903d36ce0aefd6a8a"]
["docs", "feat", "build", "ci", "test"]
coordinator accepts a request transformer instead of a list of operations The request transformer can generate the operations from the current topology. This helps to - ensure that the operations are generated based on the latest topology. When concurrent changes happen, the coordinator can detect them. Previously this was unclear because, by the time the operations were applied, the cluster topology might have changed. - return the simulated final topology as part of the result,add Expr.equals benchmark,make sure root is being watched and setRoot called when it changes,Publish crates,add travis file
["diff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\nindex 8bb5c3d..f8f5e24 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n@@ -10,6 +10,7 @@ package io.camunda.zeebe.topology.changes;\n import io.camunda.zeebe.scheduler.future.ActorFuture;\n import io.camunda.zeebe.topology.state.ClusterTopology;\n import io.camunda.zeebe.topology.state.TopologyChangeOperation;\n+import io.camunda.zeebe.util.Either;\n import java.util.List;\n \n public interface TopologyChangeCoordinator {\n@@ -39,4 +40,16 @@ public interface TopologyChangeCoordinator {\n ActorFuture<Boolean> hasCompletedChanges(final long version);\n \n ActorFuture<ClusterTopology> getCurrentTopology();\n+\n+ ActorFuture<TopologyChangeResult> applyOperations(TopologyChangeRequest request);\n+\n+ record TopologyChangeResult(\n+ ClusterTopology currentTopology,\n+ ClusterTopology finalTopology,\n+ List<TopologyChangeOperation> operations) {}\n+\n+ interface TopologyChangeRequest {\n+ Either<Exception, List<TopologyChangeOperation>> operations(\n+ final ClusterTopology currentTopology);\n+ }\n }\ndiff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\nindex 13ec754..877fc3c 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n@@ -103,6 +103,62 @@ public class TopologyChangeCoordinatorImpl implements TopologyChangeCoordinator \n return clusterTopologyManager.getClusterTopology();\n }\n \n+ @Override\n+ public ActorFuture<TopologyChangeResult> applyOperations(final TopologyChangeRequest request) {\n+ final ActorFuture<TopologyChangeResult> future = executor.createFuture();\n+ clusterTopologyManager\n+ .getClusterTopology()\n+ .onComplete(\n+ (currentClusterTopology, errorOnGettingTopology) -> {\n+ if (errorOnGettingTopology != null) {\n+ future.completeExceptionally(errorOnGettingTopology);\n+ return;\n+ }\n+\n+ final var operationsEither = request.operations(currentClusterTopology);\n+ if (operationsEither.isLeft()) {\n+ future.completeExceptionally(operationsEither.getLeft());\n+ return;\n+ }\n+ final var operations = operationsEither.get();\n+ if (operations.isEmpty()) {\n+ // No operations to apply\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, currentClusterTopology, operations));\n+ return;\n+ }\n+\n+ final ActorFuture<ClusterTopology> validation =\n+ validateTopologyChangeRequest(currentClusterTopology, operations);\n+\n+ validation.onComplete(\n+ (simulatedFinalTopology, validationError) -> {\n+ if (validationError != null) {\n+ future.completeExceptionally(validationError);\n+ return;\n+ }\n+\n+ // if the validation was successful, apply the changes\n+ final ActorFuture<ClusterTopology> applyFuture = executor.createFuture();\n+ applyTopologyChange(\n+ operations, currentClusterTopology, simulatedFinalTopology, applyFuture);\n+\n+ applyFuture.onComplete(\n+ (ignore, error) -> {\n+ if (error == null) {\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, simulatedFinalTopology, operations));\n+ } else {\n+ 
future.completeExceptionally(error);\n+ }\n+ });\n+ });\n+ });\n+ return future;\n+ }\n+\n private ActorFuture<ClusterTopology> validateTopologyChangeRequest(\n final ClusterTopology currentClusterTopology,\n final List<TopologyChangeOperation> operations) {\n", "diff --git a/ibis/tests/benchmarks/test_benchmarks.py b/ibis/tests/benchmarks/test_benchmarks.py\nindex 78305bb..9c7e6d7 100644\n--- a/ibis/tests/benchmarks/test_benchmarks.py\n+++ b/ibis/tests/benchmarks/test_benchmarks.py\n@@ -1,3 +1,4 @@\n+import copy\n import functools\n import itertools\n import string\n@@ -340,8 +341,9 @@ def test_execute(benchmark, expression_fn, pt):\n benchmark(expr.execute)\n \n \n-def test_repr_tpc_h02(benchmark):\n- part = ibis.table(\[email protected]\n+def part():\n+ return ibis.table(\n dict(\n p_partkey=\"int64\",\n p_size=\"int64\",\n@@ -350,7 +352,11 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"part\",\n )\n- supplier = ibis.table(\n+\n+\[email protected]\n+def supplier():\n+ return ibis.table(\n dict(\n s_suppkey=\"int64\",\n s_nationkey=\"int64\",\n@@ -362,7 +368,11 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"supplier\",\n )\n- partsupp = ibis.table(\n+\n+\[email protected]\n+def partsupp():\n+ return ibis.table(\n dict(\n ps_partkey=\"int64\",\n ps_suppkey=\"int64\",\n@@ -370,14 +380,25 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"partsupp\",\n )\n- nation = ibis.table(\n+\n+\[email protected]\n+def nation():\n+ return ibis.table(\n dict(n_nationkey=\"int64\", n_regionkey=\"int64\", n_name=\"string\"),\n name=\"nation\",\n )\n- region = ibis.table(\n+\n+\[email protected]\n+def region():\n+ return ibis.table(\n dict(r_regionkey=\"int64\", r_name=\"string\"), name=\"region\"\n )\n \n+\[email protected]\n+def tpc_h02(part, supplier, partsupp, nation, region):\n REGION = \"EUROPE\"\n SIZE = 25\n TYPE = \"BRASS\"\n@@ -420,7 +441,7 @@ def test_repr_tpc_h02(benchmark):\n ]\n )\n \n- expr = q.sort_by(\n+ return q.sort_by(\n [\n ibis.desc(q.s_acctbal),\n q.n_name,\n@@ -429,7 +450,9 @@ def test_repr_tpc_h02(benchmark):\n ]\n ).limit(100)\n \n- benchmark(repr, expr)\n+\n+def test_repr_tpc_h02(benchmark, tpc_h02):\n+ benchmark(repr, tpc_h02)\n \n \n def test_repr_huge_union(benchmark):\n@@ -478,3 +501,7 @@ def test_complex_datatype_builtins(benchmark, func):\n )\n )\n benchmark(func, datatype)\n+\n+\n+def test_large_expr_equals(benchmark, tpc_h02):\n+ benchmark(ir.Expr.equals, tpc_h02, copy.deepcopy(tpc_h02))\n", "diff --git a/packages/core/src/components/nav/nav.tsx b/packages/core/src/components/nav/nav.tsx\nindex 5aaacb6..27241ee 100644\n--- a/packages/core/src/components/nav/nav.tsx\n+++ b/packages/core/src/components/nav/nav.tsx\n@@ -1,4 +1,4 @@\n-import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n+import { Component, Element, Event, EventEmitter, Listen, Method, Prop, Watch } from '@stencil/core';\n import {\n Animation,\n AnimationController,\n@@ -103,10 +103,19 @@ export class Nav implements PublicNav, NavContainer {\n }\n this.init = true;\n if (!this.useRouter) {\n+ console.log('componentDidLoadImpl: ', this.root);\n componentDidLoadImpl(this);\n }\n }\n \n+ @Watch('root')\n+ updateRootComponent(): any {\n+ console.log('updateRootComponent: ', this.root);\n+ if (this.init) {\n+ return this.setRoot(this.root);\n+ }\n+ }\n+\n getViews(): PublicViewController[] {\n return getViews(this);\n }\ndiff --git a/packages/core/src/components/nav/test/set-root/index.html b/packages/core/src/components/nav/test/set-root/index.html\nnew file 
mode 100644\nindex 0000000..823c9ed\n--- /dev/null\n+++ b/packages/core/src/components/nav/test/set-root/index.html\n@@ -0,0 +1,110 @@\n+<!DOCTYPE html>\n+<html dir=\"ltr\">\n+<head>\n+ <meta charset=\"UTF-8\">\n+ <title>Nav</title>\n+ <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no\">\n+ <script src=\"/dist/ionic.js\"></script>\n+</head>\n+<body onload=\"initiaize()\">\n+ <ion-app>\n+ <ion-nav root=\"page-one\"></ion-nav>\n+ </ion-app>\n+</body>\n+\n+<script>\n+\n+ class PageOne extends HTMLElement {\n+ async connectedCallback() {\n+ this.innerHTML = `\n+ <ion-page>\n+ <ion-header>\n+ <ion-toolbar>\n+ <ion-title>Page One</ion-title>\n+ </ion-toolbar>\n+ </ion-header>\n+ <ion-content padding>\n+ <h1>Page One</h1>\n+ <ion-button class=\"next\">Go to Page Two</ion-button>\n+ </ion-content>\n+ </ion-page>`;\n+\n+ const button = this.querySelector('ion-button');\n+ button.addEventListener('click', async () => {\n+ this.closest('ion-nav').push('page-two');\n+ });\n+ }\n+ }\n+\n+ class PageTwo extends HTMLElement {\n+ async connectedCallback() {\n+ this.innerHTML = `\n+ <ion-page>\n+ <ion-header>\n+ <ion-toolbar>\n+ <ion-title>Page Two</ion-title>\n+ </ion-toolbar>\n+ </ion-header>\n+ <ion-content padding>\n+ <h1>Page Two</h1>\n+ <ion-button class=\"next\">Go to Page Three</ion-button>\n+ <ion-button class=\"previous\">Go Back</ion-button>\n+ </ion-content>\n+ </ion-page>`;\n+\n+ const previousButton = this.querySelector('ion-button.previous');\n+ previousButton.addEventListener('click', async () => {\n+ await this.closest('ion-nav').pop();\n+ });\n+\n+ const nextButton = this.querySelector('ion-button.next');\n+ nextButton.addEventListener('click', async () => {\n+ await this.closest('ion-nav').push('page-three');\n+ });\n+ }\n+ }\n+\n+ class PageThree extends HTMLElement {\n+ async connectedCallback() {\n+ this.innerHTML = `\n+ <ion-page>\n+ <ion-header>\n+ <ion-toolbar>\n+ <ion-title>Page Three</ion-title>\n+ </ion-toolbar>\n+ </ion-header>\n+ <ion-content padding>\n+ <h1>Page Three</h1>\n+ <ion-button class=\"previous\">Go Back</ion-button>\n+ </ion-content>\n+ </ion-page>`;\n+\n+ const previousButton = this.querySelector('ion-button.previous');\n+ previousButton.addEventListener('click', async () => {\n+ await this.closest('ion-nav').pop();\n+ });\n+ }\n+ }\n+\n+ customElements.define('page-one', PageOne);\n+ customElements.define('page-two', PageTwo);\n+ customElements.define('page-three', PageThree);\n+\n+ async function initiaize() {\n+ const nav = document.querySelector('ion-nav');\n+ await nav.componentOnReady();\n+ nav.root = 'page-one';\n+\n+ setInterval(() => {\n+ if (nav.root === 'page-one') {\n+ nav.root = 'page-two';\n+ } else if ( nav.root === 'page-two') {\n+ nav.root = 'page-three';\n+ } else {\n+ nav.root = 'page-one';\n+ }\n+ }, 1000);\n+ }\n+\n+</script>\n+</html>\n", "diff --git a/CHANGELOG.md b/CHANGELOG.md\nindex 7b98b44..f17ad6f 100644\n--- a/CHANGELOG.md\n+++ b/CHANGELOG.md\n@@ -7,6 +7,9 @@\n \n - **(css/parser)** Fix parsing of at rules (#3328) ([506a310](https://github.com/swc-project/swc/commit/506a31078aaebf50129658f096bbd5929995205f))\n \n+\n+- **(es/compat)** Fix regression of `destructuring` (#3326) ([6d1ad36](https://github.com/swc-project/swc/commit/6d1ad368aca53ee64a63ae565cd015909f2f4458))\n+\n ### Performance\n \n \ndiff --git a/Cargo.lock b/Cargo.lock\nindex 3c6598b..4baa252 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -2652,7 +2652,7 @@ dependencies = [\n \n [[package]]\n 
name = \"swc\"\n-version = \"0.116.15\"\n+version = \"0.116.16\"\n dependencies = [\n \"ahash\",\n \"anyhow\",\n@@ -3097,7 +3097,7 @@ dependencies = [\n \n [[package]]\n name = \"swc_ecma_transforms\"\n-version = \"0.113.3\"\n+version = \"0.113.4\"\n dependencies = [\n \"pretty_assertions 0.7.2\",\n \"sourcemap\",\n@@ -3157,7 +3157,7 @@ dependencies = [\n \n [[package]]\n name = \"swc_ecma_transforms_compat\"\n-version = \"0.68.2\"\n+version = \"0.68.3\"\n dependencies = [\n \"ahash\",\n \"arrayvec 0.7.2\",\n@@ -3366,7 +3366,7 @@ dependencies = [\n \n [[package]]\n name = \"swc_ecmascript\"\n-version = \"0.110.14\"\n+version = \"0.110.15\"\n dependencies = [\n \"swc_ecma_ast\",\n \"swc_ecma_codegen\",\ndiff --git a/crates/swc/Cargo.toml b/crates/swc/Cargo.toml\nindex 756cfc8..2f02d22 100644\n--- a/crates/swc/Cargo.toml\n+++ b/crates/swc/Cargo.toml\n@@ -9,7 +9,7 @@ include = [\"Cargo.toml\", \"src/**/*.rs\"]\n license = \"Apache-2.0\"\n name = \"swc\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.116.15\"\n+version = \"0.116.16\"\n \n [lib]\n name = \"swc\"\n@@ -55,7 +55,7 @@ swc_ecma_loader = {version = \"0.27.0\", path = \"../swc_ecma_loader\", features = [\n swc_ecma_minifier = {version = \"0.70.9\", path = \"../swc_ecma_minifier\"}\n swc_ecma_parser = {version = \"0.87.0\", path = \"../swc_ecma_parser\"}\n swc_ecma_preset_env = {version = \"0.86.1\", path = \"../swc_ecma_preset_env\"}\n-swc_ecma_transforms = {version = \"0.113.3\", path = \"../swc_ecma_transforms\", features = [\n+swc_ecma_transforms = {version = \"0.113.4\", path = \"../swc_ecma_transforms\", features = [\n \"compat\",\n \"module\",\n \"optimization\",\n@@ -64,11 +64,11 @@ swc_ecma_transforms = {version = \"0.113.3\", path = \"../swc_ecma_transforms\", fea\n \"typescript\",\n ]}\n swc_ecma_transforms_base = {version = \"0.57.1\", path = \"../swc_ecma_transforms_base\"}\n-swc_ecma_transforms_compat = {version = \"0.68.2\", path = \"../swc_ecma_transforms_compat\"}\n+swc_ecma_transforms_compat = {version = \"0.68.3\", path = \"../swc_ecma_transforms_compat\"}\n swc_ecma_transforms_optimization = {version = \"0.83.0\", path = \"../swc_ecma_transforms_optimization\"}\n swc_ecma_utils = {version = \"0.64.0\", path = \"../swc_ecma_utils\"}\n swc_ecma_visit = {version = \"0.51.1\", path = \"../swc_ecma_visit\"}\n-swc_ecmascript = {version = \"0.110.14\", path = \"../swc_ecmascript\"}\n+swc_ecmascript = {version = \"0.110.15\", path = \"../swc_ecmascript\"}\n swc_node_comments = {version = \"0.4.0\", path = \"../swc_node_comments\"}\n swc_plugin_runner = {version = \"0.30.0\", path = \"../swc_plugin_runner\", optional = true}\n swc_visit = {version = \"0.3.0\", path = \"../swc_visit\"}\ndiff --git a/crates/swc_ecma_transforms/Cargo.toml b/crates/swc_ecma_transforms/Cargo.toml\nindex 1604f4e..a0aafae 100644\n--- a/crates/swc_ecma_transforms/Cargo.toml\n+++ b/crates/swc_ecma_transforms/Cargo.toml\n@@ -6,7 +6,7 @@ edition = \"2021\"\n license = \"Apache-2.0\"\n name = \"swc_ecma_transforms\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.113.3\"\n+version = \"0.113.4\"\n \n [package.metadata.docs.rs]\n all-features = true\n@@ -28,7 +28,7 @@ swc_common = {version = \"0.17.0\", path = \"../swc_common\"}\n swc_ecma_ast = {version = \"0.65.0\", path = \"../swc_ecma_ast\"}\n swc_ecma_parser = {version = \"0.87.0\", path = \"../swc_ecma_parser\"}\n swc_ecma_transforms_base = {version = \"0.57.1\", path = \"../swc_ecma_transforms_base\"}\n-swc_ecma_transforms_compat = {version = 
\"0.68.2\", path = \"../swc_ecma_transforms_compat\", optional = true}\n+swc_ecma_transforms_compat = {version = \"0.68.3\", path = \"../swc_ecma_transforms_compat\", optional = true}\n swc_ecma_transforms_module = {version = \"0.74.0\", path = \"../swc_ecma_transforms_module\", optional = true}\n swc_ecma_transforms_optimization = {version = \"0.83.0\", path = \"../swc_ecma_transforms_optimization\", optional = true}\n swc_ecma_transforms_proposal = {version = \"0.74.0\", path = \"../swc_ecma_transforms_proposal\", optional = true}\ndiff --git a/crates/swc_ecma_transforms_compat/Cargo.toml b/crates/swc_ecma_transforms_compat/Cargo.toml\nindex 0ea6609..58374e3 100644\n--- a/crates/swc_ecma_transforms_compat/Cargo.toml\n+++ b/crates/swc_ecma_transforms_compat/Cargo.toml\n@@ -6,7 +6,7 @@ edition = \"2021\"\n license = \"Apache-2.0\"\n name = \"swc_ecma_transforms_compat\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.68.2\"\n+version = \"0.68.3\"\n # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n \n [features]\ndiff --git a/crates/swc_ecmascript/Cargo.toml b/crates/swc_ecmascript/Cargo.toml\nindex 63680a0..775208a 100644\n--- a/crates/swc_ecmascript/Cargo.toml\n+++ b/crates/swc_ecmascript/Cargo.toml\n@@ -6,7 +6,7 @@ edition = \"2021\"\n license = \"Apache-2.0\"\n name = \"swc_ecmascript\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.110.14\"\n+version = \"0.110.15\"\n \n [package.metadata.docs.rs]\n all-features = true\n@@ -39,7 +39,7 @@ swc_ecma_dep_graph = {version = \"0.58.0\", path = \"../swc_ecma_dep_graph\", option\n swc_ecma_minifier = {version = \"0.70.9\", path = \"../swc_ecma_minifier\", optional = true}\n swc_ecma_parser = {version = \"0.87.0\", path = \"../swc_ecma_parser\", optional = true, default-features = false}\n swc_ecma_preset_env = {version = \"0.86.1\", path = \"../swc_ecma_preset_env\", optional = true}\n-swc_ecma_transforms = {version = \"0.113.3\", path = \"../swc_ecma_transforms\", optional = true}\n+swc_ecma_transforms = {version = \"0.113.4\", path = \"../swc_ecma_transforms\", optional = true}\n swc_ecma_utils = {version = \"0.64.0\", path = \"../swc_ecma_utils\", optional = true}\n swc_ecma_visit = {version = \"0.51.1\", path = \"../swc_ecma_visit\", optional = true}\n \n", "diff --git a/.travis.yml b/.travis.yml\nnew file mode 100644\nindex 0000000..c08cc34\n--- /dev/null\n+++ b/.travis.yml\n@@ -0,0 +1,11 @@\n+sudo: false\n+\n+language: java\n+jdk: oraclejdk8\n+\n+branches:\n+ only:\n+ - master\n+\n+notifications:\n+ email: false\n"]
5
["dec860436916ef216998f80f8b2f9c39d00c064d", "b700285c1f27588922d9c56527cee721bb884682", "4be836f5655fb5356fde5ddd7437125f8574705d", "af53b9487f74ff28438928903fb1f2db93fe4fa8", "d0814a928601706635287fd3d9d3451d156b821a"]
["feat", "test", "refactor", "build", "ci"]
use trait objects for from_str Use `Box<dyn error::Error>` to allow solutions to use `?` to propagate errors.,Support ISNULL,update get-started,common routine for browser timezone Signed-off-by: Raju Udava <[email protected]>,update version (nightly.0)
["diff --git a/exercises/conversions/from_str.rs b/exercises/conversions/from_str.rs\nindex 41fccd7..4beebac 100644\n--- a/exercises/conversions/from_str.rs\n+++ b/exercises/conversions/from_str.rs\n@@ -2,6 +2,7 @@\n // Additionally, upon implementing FromStr, you can use the `parse` method\n // on strings to generate an object of the implementor type.\n // You can read more about it at https://doc.rust-lang.org/std/str/trait.FromStr.html\n+use std::error;\n use std::str::FromStr;\n \n #[derive(Debug)]\n@@ -23,7 +24,7 @@ struct Person {\n // If everything goes well, then return a Result of a Person object\n \n impl FromStr for Person {\n- type Err = String;\n+ type Err = Box<dyn error::Error>;\n fn from_str(s: &str) -> Result<Person, Self::Err> {\n }\n }\ndiff --git a/info.toml b/info.toml\nindex 2068750..4a1d3aa 100644\n--- a/info.toml\n+++ b/info.toml\n@@ -884,5 +884,5 @@ path = \"exercises/conversions/from_str.rs\"\n mode = \"test\"\n hint = \"\"\"\n The implementation of FromStr should return an Ok with a Person object,\n-or an Err with a string if the string is not valid.\n+or an Err with an error if the string is not valid.\n This is almost like the `try_from_into` exercise.\"\"\"\n", "diff --git a/rust/cubesql/src/compile/engine/udf.rs b/rust/cubesql/src/compile/engine/udf.rs\nindex f3991c0..549167a 100644\n--- a/rust/cubesql/src/compile/engine/udf.rs\n+++ b/rust/cubesql/src/compile/engine/udf.rs\n@@ -3,13 +3,16 @@ use std::sync::Arc;\n \n use datafusion::{\n arrow::{\n- array::{ArrayRef, GenericStringArray, Int32Builder, StringBuilder, UInt32Builder},\n+ array::{\n+ ArrayRef, BooleanBuilder, GenericStringArray, Int32Builder, StringBuilder,\n+ UInt32Builder,\n+ },\n datatypes::DataType,\n },\n error::DataFusionError,\n logical_plan::create_udf,\n physical_plan::{\n- functions::{make_scalar_function, Volatility},\n+ functions::{make_scalar_function, ReturnTypeFunction, Signature, Volatility},\n udf::ScalarUDF,\n },\n };\n@@ -167,3 +170,24 @@ pub fn create_instr_udf() -> ScalarUDF {\n fun,\n )\n }\n+\n+pub fn create_isnull_udf() -> ScalarUDF {\n+ let fun = make_scalar_function(move |args: &[ArrayRef]| {\n+ assert!(args.len() == 1);\n+\n+ let mut builder = BooleanBuilder::new(1);\n+ builder.append_value(args[0].is_null(0))?;\n+\n+ Ok(Arc::new(builder.finish()) as ArrayRef)\n+ });\n+\n+ let return_type: ReturnTypeFunction =\n+ Arc::new(move |_| Ok(Arc::new(DataType::Boolean).clone()));\n+\n+ ScalarUDF::new(\n+ \"isnull\",\n+ &Signature::any(1, Volatility::Immutable),\n+ &return_type,\n+ &fun,\n+ )\n+}\ndiff --git a/rust/cubesql/src/compile/mod.rs b/rust/cubesql/src/compile/mod.rs\nindex 891283b..9004ffe 100644\n--- a/rust/cubesql/src/compile/mod.rs\n+++ b/rust/cubesql/src/compile/mod.rs\n@@ -36,7 +36,7 @@ use self::context::*;\n use self::engine::context::SystemVar;\n use self::engine::udf::{\n create_connection_id_udf, create_current_user_udf, create_db_udf, create_instr_udf,\n- create_user_udf, create_version_udf,\n+ create_isnull_udf, create_user_udf, create_version_udf,\n };\n use self::parser::parse_sql_to_statement;\n \n@@ -1415,6 +1415,7 @@ impl QueryPlanner {\n ctx.register_udf(create_user_udf(props));\n ctx.register_udf(create_current_user_udf(props));\n ctx.register_udf(create_instr_udf());\n+ ctx.register_udf(create_isnull_udf());\n \n {\n let schema_provider = MemorySchemaProvider::new();\n", "diff --git a/docs/src/go-client/get-started.md b/docs/src/go-client/get-started.md\nindex 4f4405f..a792e0e 100755\n--- a/docs/src/go-client/get-started.md\n+++ 
b/docs/src/go-client/get-started.md\n@@ -199,14 +199,12 @@ workflowKey:1 bpmnProcessId:\"order-process\" version:1 workflowInstanceKey:6\n \n You did it! You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n \n ## Work on a task\n@@ -322,7 +320,7 @@ it encounters a problem while processing the job.\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/go-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n When you run the above example you should see similar output:\n \ndiff --git a/docs/src/go-client/java-get-started-monitor-1.gif b/docs/src/go-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/go-client/java-get-started-monitor-2.gif b/docs/src/go-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/go-client/zeebe-monitor-1.png b/docs/src/go-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-2.png b/docs/src/go-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 6687bb0..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-3.png b/docs/src/go-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-3.png and /dev/null differ\ndiff --git a/docs/src/introduction/quickstart.md b/docs/src/introduction/quickstart.md\nindex 70abacf..68be28b 100644\n--- a/docs/src/introduction/quickstart.md\n+++ b/docs/src/introduction/quickstart.md\n@@ -215,7 +215,7 @@ and completed by a [job worker](/basics/job-workers.html). A job worker is a\n long living process which repeatedly tries to activate jobs for a given job\n type and completes them after executing its business logic. The `zbctl` also\n provides a command to spawn simple job workers using an external command or\n-script. The job worker will receive for every job the payload as JSON object on\n+script. 
The job worker will receive for every job the workflow instance variables as JSON object on\n `stdin` and has to return its result also as JSON object on `stdout` if it\n handled the job successfully.\n \ndiff --git a/docs/src/java-client/get-started.md b/docs/src/java-client/get-started.md\nindex 54d2208..afc1fd4 100755\n--- a/docs/src/java-client/get-started.md\n+++ b/docs/src/java-client/get-started.md\n@@ -21,9 +21,9 @@ You will be guided through the following steps:\n * [Zeebe Modeler](https://github.com/zeebe-io/zeebe-modeler/releases)\n * [Zeebe Monitor](https://github.com/zeebe-io/zeebe-simple-monitor/releases)\n \n-Before you begin to setup your project please start the broker, i.e. by running the start up script \n-`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the \n-address `localhost:26500`, which is used as contact point in this guide. In case your broker is \n+Before you begin to setup your project please start the broker, i.e. by running the start up script\n+`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the\n+address `localhost:26500`, which is used as contact point in this guide. In case your broker is\n available under another address please adjust the broker contact point when building the client.\n \n ## Set up a project\n@@ -182,14 +182,12 @@ Workflow instance created. Key: 6\n \n You did it! You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n ## Work on a job\n \n@@ -205,12 +203,9 @@ Insert a few service tasks between the start and the end event.\n You need to set the type of each task, which identifies the nature of the work to be performed.\n Set the type of the first task to 'payment-service'.\n \n-Optionally, you can define parameters of the task by adding headers.\n-Add the header `method = VISA` to the first task.\n-\n Save the BPMN diagram and switch back to the main class.\n \n-Add the following lines to create a [job worker][] for the first jobs type:\n+Add the following lines to create a job worker for the first jobs type:\n \n ```java\n package io.zeebe;\n@@ -227,10 +222,7 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Collect money\");\n \n // ...\n \n@@ -252,40 +244,29 @@ public class Application\n Run the program and verify that the job is processed. 
You should see the output:\n \n ```\n-Collect money using payment method: VISA\n+Collect money\n ```\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/java-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n ## Work with data\n \n-Usually, a workflow is more than just tasks, there is also data flow.\n-The tasks need data as input and in order to produce data.\n+Usually, a workflow is more than just tasks, there is also a data flow. The worker gets the data from the workflow instance to do its work and send the result back to the workflow instance.\n \n-In Zeebe, the data is represented as a JSON document.\n-When you create a workflow instance, then you can pass the data as payload.\n-Within the workflow, you can use input and output mappings on tasks to control the data flow.\n+In Zeebe, the data is stored as key-value-pairs in form of variables. Variables can be set when the workflow instance is created. Within the workflow, variables can be read and modified by workers.\n \n-In our example, we want to create a workflow instance with the following data:\n+In our example, we want to create a workflow instance with the following variables:\n \n ```json\n-{\n- \"orderId\": 31243,\n- \"orderItems\": [435, 182, 376]\n-}\n+\"orderId\": 31243\n+\"orderItems\": [435, 182, 376]\n ```\n \n-The first task should take `orderId` as input and return `totalPrice` as result.\n-\n-Open the BPMN diagram and switch to the input-output-mappings of the first task.\n-Add the input mapping `$.orderId : $.orderId` and the output mapping `$.totalPrice : $.totalPrice`.\n+The first task should read `orderId` as input and return `totalPrice` as result.\n \n-Save the BPMN diagram and go back to the main class.\n-\n-Modify the create command and pass the data as variables.\n-Also, modify the job worker to read the jobs payload and complete the job with payload.\n+Modify the workflow instance create command and pass the data as variables. Also, modify the job worker to read the job variables and complete the job with a result.\n \n ```java\n package io.zeebe;\n@@ -313,23 +294,22 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- final Map<String, Object> payload = job.getPayloadAsMap();\n+ final Map<String, Object> variables = job.getVariablesAsMap();\n \n- System.out.println(\"Process order: \" + payload.get(\"orderId\"));\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Process order: \" + variables.get(\"orderId\"));\n+ System.out.println(\"Collect money\");\n \n // ...\n \n- payload.put(\"totalPrice\", 46.50);\n+ final Map<String, Object> result = new HashMap<>();\n+ result.put(\"totalPrice\", 46.50);\n \n jobClient.newCompleteCommand(job.getKey())\n- .payload(payload)\n+ .variables(result)\n .send()\n .join();\n })\n+ .fetchVariables(\"orderId\")\n .open();\n \n // ...\n@@ -337,16 +317,16 @@ public class Application\n }\n ```\n \n-Run the program and verify that the payload is mapped into the job. You should see the output:\n+Run the program and verify that the variable is read. 
You should see the output:\n \n ```\n-Process order: {\"orderId\":31243}\n-Collect money using payment method: VISA\n+Process order: 31243\n+Collect money\n ```\n \n-When we have a look at the Zeebe Monitor, then we can see how the payload is modified after the activity:\n+When we have a look at the Zeebe Monitor, then we can see that the variable `totalPrice` is set:\n \n-![zeebe-monitor-step-3](/java-client/zeebe-monitor-3.png)\n+![zeebe-monitor-step-3](/java-client/java-get-started-monitor-3.gif)\n \n ## What's next?\n \ndiff --git a/docs/src/java-client/java-get-started-monitor-1.gif b/docs/src/java-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-2.gif b/docs/src/java-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-3.gif b/docs/src/java-client/java-get-started-monitor-3.gif\nnew file mode 100644\nindex 0000000..1f6cb56\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-3.gif differ\ndiff --git a/docs/src/java-client/zeebe-monitor-1.png b/docs/src/java-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-2.png b/docs/src/java-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 6687bb0..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-3.png b/docs/src/java-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-3.png and /dev/null differ\n", "diff --git a/tests/playwright/tests/db/timezone.spec.ts b/tests/playwright/tests/db/timezone.spec.ts\nindex c966c2b..a30c7e4 100644\n--- a/tests/playwright/tests/db/timezone.spec.ts\n+++ b/tests/playwright/tests/db/timezone.spec.ts\n@@ -6,6 +6,7 @@ import { Api, UITypes } from 'nocodb-sdk';\n import { ProjectsPage } from '../../pages/ProjectsPage';\n import { isMysql, isPg, isSqlite } from '../../setup/db';\n import { getKnexConfig } from '../utils/config';\n+import { getBrowserTimezoneOffset } from '../utils/general';\n let api: Api<any>, records: any[];\n \n const columns = [\n@@ -680,11 +681,7 @@ test.describe.serial('External DB - DateTime column', async () => {\n await dashboard.rootPage.waitForTimeout(2000);\n \n // get timezone offset\n- const timezoneOffset = new Date().getTimezoneOffset();\n- const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n- const minutes = Math.abs(timezoneOffset % 60);\n- const sign = timezoneOffset <= 0 ? 
'+' : '-';\n- const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ const formattedOffset = getBrowserTimezoneOffset();\n \n await dashboard.treeView.openBase({ title: 'datetimetable' });\n await dashboard.treeView.openTable({ title: 'MyTable' });\n@@ -844,11 +841,7 @@ test.describe('Ext DB MySQL : DB Timezone configured as HKT', () => {\n }\n \n // get timezone offset\n- const timezoneOffset = new Date().getTimezoneOffset();\n- const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n- const minutes = Math.abs(timezoneOffset % 60);\n- const sign = timezoneOffset <= 0 ? '+' : '-';\n- const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ const formattedOffset = getBrowserTimezoneOffset();\n \n // connect after timezone is set\n await connectToExtDb(context);\ndiff --git a/tests/playwright/tests/utils/general.ts b/tests/playwright/tests/utils/general.ts\nindex 56a9e1a..45e9c6c 100644\n--- a/tests/playwright/tests/utils/general.ts\n+++ b/tests/playwright/tests/utils/general.ts\n@@ -50,4 +50,14 @@ function getDefaultPwd() {\n return 'Password123.';\n }\n \n-export { getTextExcludeIconText, isSubset, getIconText, getDefaultPwd };\n+function getBrowserTimezoneOffset() {\n+ // get timezone offset\n+ const timezoneOffset = new Date().getTimezoneOffset();\n+ const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n+ const minutes = Math.abs(timezoneOffset % 60);\n+ const sign = timezoneOffset <= 0 ? '+' : '-';\n+ const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ return formattedOffset;\n+}\n+\n+export { getTextExcludeIconText, isSubset, getIconText, getDefaultPwd, getBrowserTimezoneOffset };\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex e6f659c..cf93556 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -126,7 +126,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -134,7 +134,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex c58299b..6e51b6e 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full-repl = [\"erg_common/full-repl\"]\n full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.11\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.11\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.11\", path = \"./crates/erg_compiler\" }\n-els = 
{ version = \"0.1.23\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.24-nightly.0\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 5f005a1..e1a9964 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n"]
5
["c3e7b831786c9172ed8bd5d150f3c432f242fba9", "f0a4b62f4bd2a1ba2caf37c764b117b352a2f2b3", "cf6d526123abab2689b24a06aaf03d8e4d6ddff4", "7d3e9b3a98b02f6cb1f3444dc7e3a0459aeb26a7", "92e940efeee199b1e0bbbc3c9eea7f3dc8221619"]
["fix", "feat", "docs", "test", "build"]
dedup redundant imports,fixed start types for size and opacity,fix golden tests for aws_vpn_connection,fix deployment to kubernetes Signed-off-by: Rajesh Rajendran <[email protected]>,bundle and tree shake assets with webpack
["diff --git a/ibis/backends/base/__init__.py b/ibis/backends/base/__init__.py\nindex effd44c..a59c0ec 100644\n--- a/ibis/backends/base/__init__.py\n+++ b/ibis/backends/base/__init__.py\n@@ -31,7 +31,7 @@ import ibis.common.exceptions as exc\n import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n \n __all__ = ('BaseBackend', 'Database', 'connect')\n \ndiff --git a/ibis/backends/base/sql/__init__.py b/ibis/backends/base/sql/__init__.py\nindex e4f2129..7bbdaf9 100644\n--- a/ibis/backends/base/sql/__init__.py\n+++ b/ibis/backends/base/sql/__init__.py\n@@ -12,7 +12,7 @@ import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import BaseBackend\n from ibis.backends.base.sql.compiler import Compiler\n \ndiff --git a/ibis/backends/base/sql/alchemy/__init__.py b/ibis/backends/base/sql/alchemy/__init__.py\nindex 71cc0e8..ab89d7d 100644\n--- a/ibis/backends/base/sql/alchemy/__init__.py\n+++ b/ibis/backends/base/sql/alchemy/__init__.py\n@@ -11,7 +11,7 @@ import ibis\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.alchemy.database import AlchemyDatabase, AlchemyTable\n from ibis.backends.base.sql.alchemy.datatypes import (\ndiff --git a/ibis/backends/base/sql/alchemy/query_builder.py b/ibis/backends/base/sql/alchemy/query_builder.py\nindex 54c74ba..0ec432f 100644\n--- a/ibis/backends/base/sql/alchemy/query_builder.py\n+++ b/ibis/backends/base/sql/alchemy/query_builder.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import functools\n \n import sqlalchemy as sa\n-import sqlalchemy.sql as sql\n+from sqlalchemy import sql\n \n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/base/sql/compiler/base.py b/ibis/backends/base/sql/compiler/base.py\nindex 84102aa..fb44667 100644\n--- a/ibis/backends/base/sql/compiler/base.py\n+++ b/ibis/backends/base/sql/compiler/base.py\n@@ -7,7 +7,7 @@ import toolz\n \n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n class DML(abc.ABC):\ndiff --git a/ibis/backends/base/sql/compiler/query_builder.py b/ibis/backends/base/sql/compiler/query_builder.py\nindex a2d5214..95f5e8d 100644\n--- a/ibis/backends/base/sql/compiler/query_builder.py\n+++ b/ibis/backends/base/sql/compiler/query_builder.py\n@@ -8,7 +8,7 @@ import toolz\n import ibis.common.exceptions as com\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.compiler.base import DML, QueryAST, SetOp\n from ibis.backends.base.sql.compiler.select_builder import SelectBuilder, _LimitSpec\n from ibis.backends.base.sql.compiler.translator import ExprTranslator, QueryContext\ndiff --git a/ibis/backends/base/sql/registry/main.py b/ibis/backends/base/sql/registry/main.py\nindex 77f70a5..586ace5 100644\n--- a/ibis/backends/base/sql/registry/main.py\n+++ b/ibis/backends/base/sql/registry/main.py\n@@ -4,7 +4,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis 
import util\n from ibis.backends.base.sql.registry import (\n aggregate,\n binary_infix,\ndiff --git a/ibis/backends/base/sql/registry/timestamp.py b/ibis/backends/base/sql/registry/timestamp.py\nindex 412eab1..3c8571f 100644\n--- a/ibis/backends/base/sql/registry/timestamp.py\n+++ b/ibis/backends/base/sql/registry/timestamp.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n def extract_field(sql_attr):\ndiff --git a/ibis/backends/clickhouse/tests/test_client.py b/ibis/backends/clickhouse/tests/test_client.py\nindex 8db6672..bb1b9ba 100644\n--- a/ibis/backends/clickhouse/tests/test_client.py\n+++ b/ibis/backends/clickhouse/tests/test_client.py\n@@ -3,9 +3,9 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.backends.clickhouse.tests.conftest import (\n CLICKHOUSE_HOST,\n CLICKHOUSE_PASS,\ndiff --git a/ibis/backends/conftest.py b/ibis/backends/conftest.py\nindex 3a974da..ba7ad75 100644\n--- a/ibis/backends/conftest.py\n+++ b/ibis/backends/conftest.py\n@@ -20,7 +20,7 @@ if TYPE_CHECKING:\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import _get_backend_names\n \n TEST_TABLES = {\ndiff --git a/ibis/backends/dask/execution/util.py b/ibis/backends/dask/execution/util.py\nindex 61bff7e..7ed0c10 100644\n--- a/ibis/backends/dask/execution/util.py\n+++ b/ibis/backends/dask/execution/util.py\n@@ -9,13 +9,13 @@ import pandas as pd\n from dask.dataframe.groupby import SeriesGroupBy\n \n import ibis.backends.pandas.execution.util as pd_util\n-import ibis.common.graph as graph\n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n import ibis.util\n from ibis.backends.dask.core import execute\n from ibis.backends.pandas.trace import TraceTwoLevelDispatcher\n+from ibis.common import graph\n from ibis.expr.scope import Scope\n \n if TYPE_CHECKING:\ndiff --git a/ibis/backends/duckdb/datatypes.py b/ibis/backends/duckdb/datatypes.py\nindex fd6b8f5..52c0719 100644\n--- a/ibis/backends/duckdb/datatypes.py\n+++ b/ibis/backends/duckdb/datatypes.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import parsy as p\n import toolz\n \n-import ibis.util as util\n+from ibis import util\n from ibis.common.parsing import (\n COMMA,\n FIELD,\ndiff --git a/ibis/backends/impala/__init__.py b/ibis/backends/impala/__init__.py\nindex 4ad2057..8299a28 100644\n--- a/ibis/backends/impala/__init__.py\n+++ b/ibis/backends/impala/__init__.py\n@@ -20,7 +20,7 @@ import ibis.config\n import ibis.expr.datatypes as dt\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.ddl import (\n CTAS,\ndiff --git a/ibis/backends/impala/client.py b/ibis/backends/impala/client.py\nindex 6655ce7..78d526f 100644\n--- a/ibis/backends/impala/client.py\n+++ b/ibis/backends/impala/client.py\n@@ -10,7 +10,7 @@ import sqlalchemy as sa\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import Database\n from ibis.backends.base.sql.compiler import DDL, DML\n 
from ibis.backends.base.sql.ddl import (\ndiff --git a/ibis/backends/impala/pandas_interop.py b/ibis/backends/impala/pandas_interop.py\nindex f410a8b..e687884 100644\n--- a/ibis/backends/impala/pandas_interop.py\n+++ b/ibis/backends/impala/pandas_interop.py\n@@ -22,7 +22,7 @@ from posixpath import join as pjoin\n import ibis.backends.pandas.client # noqa: F401\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.config import options\n \n \ndiff --git a/ibis/backends/impala/tests/conftest.py b/ibis/backends/impala/tests/conftest.py\nindex 1075ebe..a815be5 100644\n--- a/ibis/backends/impala/tests/conftest.py\n+++ b/ibis/backends/impala/tests/conftest.py\n@@ -13,8 +13,7 @@ import pytest\n \n import ibis\n import ibis.expr.types as ir\n-import ibis.util as util\n-from ibis import options\n+from ibis import options, util\n from ibis.backends.base import BaseBackend\n from ibis.backends.conftest import TEST_TABLES, _random_identifier\n from ibis.backends.impala.compiler import ImpalaCompiler, ImpalaExprTranslator\ndiff --git a/ibis/backends/impala/tests/test_client.py b/ibis/backends/impala/tests/test_client.py\nindex 0b56054..3fcca3a 100644\n--- a/ibis/backends/impala/tests/test_client.py\n+++ b/ibis/backends/impala/tests/test_client.py\n@@ -7,9 +7,9 @@ import pytz\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_ddl.py b/ibis/backends/impala/tests/test_ddl.py\nindex 870c4dc..2346a3d 100644\n--- a/ibis/backends/impala/tests/test_ddl.py\n+++ b/ibis/backends/impala/tests/test_ddl.py\n@@ -6,7 +6,7 @@ import ibis\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.ddl import fully_qualified_re\n from ibis.tests.util import assert_equal\n \ndiff --git a/ibis/backends/impala/tests/test_exprs.py b/ibis/backends/impala/tests/test_exprs.py\nindex cfc8552..1d6f44f 100644\n--- a/ibis/backends/impala/tests/test_exprs.py\n+++ b/ibis/backends/impala/tests/test_exprs.py\n@@ -5,10 +5,10 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.types as ir\n from ibis import literal as L\n from ibis.backends.impala.compiler import ImpalaCompiler\n+from ibis.expr import api\n from ibis.expr.datatypes import Category\n \n \ndiff --git a/ibis/backends/impala/tests/test_partition.py b/ibis/backends/impala/tests/test_partition.py\nindex 1f96e7d..44217a4 100644\n--- a/ibis/backends/impala/tests/test_partition.py\n+++ b/ibis/backends/impala/tests/test_partition.py\n@@ -6,7 +6,7 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_udf.py b/ibis/backends/impala/tests/test_udf.py\nindex 895918b..fd950d5 100644\n--- a/ibis/backends/impala/tests/test_udf.py\n+++ b/ibis/backends/impala/tests/test_udf.py\n@@ -9,11 +9,11 @@ import ibis\n import ibis.backends.impala as api\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n-import ibis.expr.rules as rules\n import ibis.expr.types as ir\n-import 
ibis.util as util\n+from ibis import util\n from ibis.backends.impala import ddl\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import rules\n \n pytest.importorskip(\"impala\")\n \ndiff --git a/ibis/backends/impala/udf.py b/ibis/backends/impala/udf.py\nindex c6f2ef6..8b8b552 100644\n--- a/ibis/backends/impala/udf.py\n+++ b/ibis/backends/impala/udf.py\n@@ -21,7 +21,7 @@ import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.udf.validate as v\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.registry import fixed_arity, sql_type_names\n from ibis.backends.impala.compiler import ImpalaExprTranslator\n \ndiff --git a/ibis/backends/mysql/__init__.py b/ibis/backends/mysql/__init__.py\nindex c0ddacb..50b331a 100644\n--- a/ibis/backends/mysql/__init__.py\n+++ b/ibis/backends/mysql/__init__.py\n@@ -8,7 +8,7 @@ import warnings\n from typing import Literal\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/mysql/compiler.py b/ibis/backends/mysql/compiler.py\nindex 13819cb..7456f71 100644\n--- a/ibis/backends/mysql/compiler.py\n+++ b/ibis/backends/mysql/compiler.py\n@@ -1,7 +1,7 @@\n from __future__ import annotations\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n from ibis.backends.base.sql.alchemy import AlchemyCompiler, AlchemyExprTranslator\ndiff --git a/ibis/backends/postgres/tests/test_functions.py b/ibis/backends/postgres/tests/test_functions.py\nindex 33c6d2e..0f377e3 100644\n--- a/ibis/backends/postgres/tests/test_functions.py\n+++ b/ibis/backends/postgres/tests/test_functions.py\n@@ -11,9 +11,9 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis import literal as L\n from ibis.expr.window import rows_with_max_lookback\n \ndiff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py\nindex 1b42080..b994911 100644\n--- a/ibis/backends/pyspark/__init__.py\n+++ b/ibis/backends/pyspark/__init__.py\n@@ -14,8 +14,7 @@ import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.expr.types as types\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.compiler import Compiler, TableSetFormatter\n from ibis.backends.base.sql.ddl import (\n@@ -217,16 +216,16 @@ class Backend(BaseSQLBackend):\n **kwargs: Any,\n ) -> Any:\n \"\"\"Execute an expression.\"\"\"\n- if isinstance(expr, types.Table):\n+ if isinstance(expr, ir.Table):\n return self.compile(expr, timecontext, params, **kwargs).toPandas()\n- elif isinstance(expr, types.Column):\n+ elif isinstance(expr, ir.Column):\n # expression must be named for the projection\n if not expr.has_name():\n expr = expr.name(\"tmp\")\n return self.compile(\n expr.to_projection(), timecontext, params, **kwargs\n ).toPandas()[expr.get_name()]\n- elif isinstance(expr, types.Scalar):\n+ elif isinstance(expr, ir.Scalar):\n compiled = self.compile(expr, timecontext, params, **kwargs)\n if isinstance(compiled, Column):\n # attach result column to a fake DataFrame and\ndiff --git 
a/ibis/backends/pyspark/tests/test_ddl.py b/ibis/backends/pyspark/tests/test_ddl.py\nindex 0288062..ccc8a97 100644\n--- a/ibis/backends/pyspark/tests/test_ddl.py\n+++ b/ibis/backends/pyspark/tests/test_ddl.py\n@@ -5,7 +5,7 @@ import pytest\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pyspark = pytest.importorskip(\"pyspark\")\ndiff --git a/ibis/backends/sqlite/tests/test_client.py b/ibis/backends/sqlite/tests/test_client.py\nindex 95aa24d..ad64700 100644\n--- a/ibis/backends/sqlite/tests/test_client.py\n+++ b/ibis/backends/sqlite/tests/test_client.py\n@@ -5,8 +5,8 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.types as ir\n+from ibis import config\n \n pytest.importorskip(\"sqlalchemy\")\n \ndiff --git a/ibis/expr/format.py b/ibis/expr/format.py\nindex e3d48cd..85fab3f 100644\n--- a/ibis/expr/format.py\n+++ b/ibis/expr/format.py\n@@ -9,13 +9,13 @@ from typing import Any, Callable, Deque, Iterable, Mapping, Tuple\n import rich.pretty\n \n import ibis\n-import ibis.common.graph as graph\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n import ibis.expr.window as win\n-import ibis.util as util\n+from ibis import util\n+from ibis.common import graph\n \n Aliases = Mapping[ops.TableNode, int]\n Deps = Deque[Tuple[int, ops.TableNode]]\ndiff --git a/ibis/expr/operations/relations.py b/ibis/expr/operations/relations.py\nindex 080ddcd..de44a15 100644\n--- a/ibis/expr/operations/relations.py\n+++ b/ibis/expr/operations/relations.py\n@@ -11,7 +11,7 @@ import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute\n from ibis.expr.deferred import Deferred\n from ibis.expr.operations.core import Named, Node, Value\ndiff --git a/ibis/expr/rules.py b/ibis/expr/rules.py\nindex 9b1a3b7..d40700e 100644\n--- a/ibis/expr/rules.py\n+++ b/ibis/expr/rules.py\n@@ -11,7 +11,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute, optional\n from ibis.common.validators import (\n bool_,\ndiff --git a/ibis/expr/timecontext.py b/ibis/expr/timecontext.py\nindex 7ecd8e7..9620d6c 100644\n--- a/ibis/expr/timecontext.py\n+++ b/ibis/expr/timecontext.py\n@@ -38,8 +38,8 @@ from typing import TYPE_CHECKING, Any\n import numpy as np\n \n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.operations as ops\n+from ibis import config\n \n if TYPE_CHECKING:\n import pandas as pd\ndiff --git a/ibis/expr/types/groupby.py b/ibis/expr/types/groupby.py\nindex 138f92e..97aaaa2 100644\n--- a/ibis/expr/types/groupby.py\n+++ b/ibis/expr/types/groupby.py\n@@ -22,7 +22,7 @@ from typing import Iterable, Sequence\n import ibis.expr.analysis as an\n import ibis.expr.types as ir\n import ibis.expr.window as _window\n-import ibis.util as util\n+from ibis import util\n from ibis.expr.deferred import Deferred\n \n _function_types = tuple(\ndiff --git a/ibis/expr/window.py b/ibis/expr/window.py\nindex 5ef3bb1..3e0efdc 100644\n--- a/ibis/expr/window.py\n+++ b/ibis/expr/window.py\n@@ -11,7 +11,7 @@ import 
toolz\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.exceptions import IbisInputError\n from ibis.common.grounds import Comparable\n \ndiff --git a/ibis/tests/expr/test_decimal.py b/ibis/tests/expr/test_decimal.py\nindex 85d8eb2..12b809b 100644\n--- a/ibis/tests/expr/test_decimal.py\n+++ b/ibis/tests/expr/test_decimal.py\n@@ -3,10 +3,10 @@ import operator\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_type_metadata(lineitem):\ndiff --git a/ibis/tests/expr/test_interactive.py b/ibis/tests/expr/test_interactive.py\nindex cea1945..0c5613b 100644\n--- a/ibis/tests/expr/test_interactive.py\n+++ b/ibis/tests/expr/test_interactive.py\n@@ -14,7 +14,7 @@\n \n import pytest\n \n-import ibis.config as config\n+from ibis import config\n from ibis.tests.expr.mocks import MockBackend\n \n \ndiff --git a/ibis/tests/expr/test_table.py b/ibis/tests/expr/test_table.py\nindex 04f4a7d..3f77985 100644\n--- a/ibis/tests/expr/test_table.py\n+++ b/ibis/tests/expr/test_table.py\n@@ -10,13 +10,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as an\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n from ibis import _\n from ibis import literal as L\n from ibis.common.exceptions import RelationError\n+from ibis.expr import api\n from ibis.expr.types import Column, Table\n from ibis.tests.expr.mocks import MockAlchemyBackend, MockBackend\n from ibis.tests.util import assert_equal, assert_pickle_roundtrip\ndiff --git a/ibis/tests/expr/test_temporal.py b/ibis/tests/expr/test_temporal.py\nindex e76e71c..9a0f43f 100644\n--- a/ibis/tests/expr/test_temporal.py\n+++ b/ibis/tests/expr/test_temporal.py\n@@ -5,10 +5,10 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_temporal_literals():\ndiff --git a/ibis/tests/expr/test_timestamp.py b/ibis/tests/expr/test_timestamp.py\nindex 6601c8b..7782787 100644\n--- a/ibis/tests/expr/test_timestamp.py\n+++ b/ibis/tests/expr/test_timestamp.py\n@@ -5,11 +5,11 @@ import pandas as pd\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_field_select(alltypes):\ndiff --git a/ibis/tests/expr/test_value_exprs.py b/ibis/tests/expr/test_value_exprs.py\nindex 4c3d475..9eb247c 100644\n--- a/ibis/tests/expr/test_value_exprs.py\n+++ b/ibis/tests/expr/test_value_exprs.py\n@@ -15,13 +15,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as L\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n from ibis import _, literal\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import api\n from ibis.tests.util import assert_equal\n \n \ndiff --git a/ibis/tests/expr/test_visualize.py 
b/ibis/tests/expr/test_visualize.py\nindex 5525944..253564f 100644\n--- a/ibis/tests/expr/test_visualize.py\n+++ b/ibis/tests/expr/test_visualize.py\n@@ -9,8 +9,8 @@ import ibis.expr.types as ir\n \n pytest.importorskip('graphviz')\n \n-import ibis.expr.api as api # noqa: E402\n import ibis.expr.visualize as viz # noqa: E402\n+from ibis.expr import api # noqa: E402\n \n pytestmark = pytest.mark.skipif(\n int(os.environ.get('CONDA_BUILD', 0)) == 1, reason='CONDA_BUILD defined'\ndiff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 2ad5453..3aa8c3d 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -15,8 +15,8 @@\n import operator\n \n import pytest\n-import sqlalchemy.sql as sql\n from sqlalchemy import func as F\n+from sqlalchemy import sql\n from sqlalchemy import types as sat\n \n import ibis\ndiff --git a/ibis/tests/util.py b/ibis/tests/util.py\nindex f79d09a..025bfc7 100644\n--- a/ibis/tests/util.py\n+++ b/ibis/tests/util.py\n@@ -5,7 +5,7 @@ from __future__ import annotations\n import pickle\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n \n \n def assert_equal(left, right):\ndiff --git a/pyproject.toml b/pyproject.toml\nindex f2146d4..492ad9e 100644\n--- a/pyproject.toml\n+++ b/pyproject.toml\n@@ -310,6 +310,7 @@ select = [\n \"PGH\", # pygrep-hooks\n \"PLC\", # pylint\n \"PLE\", # pylint\n+ \"PLR\", # pylint import style\n \"PLW\", # pylint\n \"RET\", # flake8-return\n \"RUF\", # ruff-specific rules\n", "diff --git a/core/main/src/Core/Particle.ts b/core/main/src/Core/Particle.ts\nindex 1aa6fba..6ea6ffc 100644\n--- a/core/main/src/Core/Particle.ts\n+++ b/core/main/src/Core/Particle.ts\n@@ -271,7 +271,7 @@ export class Particle implements IParticle {\n }\n }\n \n- const sizeAnimation = this.options.size.animation;\n+ const sizeAnimation = sizeOptions.animation;\n \n if (sizeAnimation.enable) {\n this.size.status = AnimationStatus.increasing;\n@@ -279,7 +279,8 @@ export class Particle implements IParticle {\n if (!randomSize) {\n switch (sizeAnimation.startValue) {\n case StartValueType.min:\n- this.size.value = sizeAnimation.minimumValue * pxRatio;\n+ this.size.value = NumberUtils.getRangeMin(sizeOptions.value) * pxRatio;\n+ this.size.status = AnimationStatus.increasing;\n \n break;\n \n@@ -287,11 +288,14 @@ export class Particle implements IParticle {\n this.size.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(sizeAnimation.minimumValue * pxRatio, this.size.value)\n );\n+ this.size.status =\n+ Math.random() >= 0.5 ? AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.size.value = NumberUtils.getRangeMax(sizeOptions.value) * pxRatio;\n this.size.status = AnimationStatus.decreasing;\n \n break;\n@@ -393,7 +397,8 @@ export class Particle implements IParticle {\n if (!randomOpacity) {\n switch (opacityAnimation.startValue) {\n case StartValueType.min:\n- this.opacity.value = opacityAnimation.minimumValue;\n+ this.opacity.value = NumberUtils.getRangeMin(this.opacity.value);\n+ this.opacity.status = AnimationStatus.increasing;\n \n break;\n \n@@ -401,11 +406,14 @@ export class Particle implements IParticle {\n this.opacity.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(opacityAnimation.minimumValue, this.opacity.value)\n );\n+ this.opacity.status =\n+ Math.random() >= 0.5 ? 
AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.opacity.value = NumberUtils.getRangeMax(this.opacity.value);\n this.opacity.status = AnimationStatus.decreasing;\n \n break;\ndiff --git a/presets/confetti/src/options.ts b/presets/confetti/src/options.ts\nindex 7fc6225..a713425 100644\n--- a/presets/confetti/src/options.ts\n+++ b/presets/confetti/src/options.ts\n@@ -28,7 +28,7 @@ export const loadOptions = (confettiOptions: RecursivePartial<IConfettiOptions>)\n animation: {\n enable: true,\n minimumValue: 0,\n- speed: 2,\n+ speed: 0.5,\n startValue: \"max\",\n destroy: \"min\",\n },\n", "diff --git a/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf b/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\nindex d895677..cf10e3f 100644\n--- a/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\n+++ b/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\n@@ -12,6 +12,7 @@ provider \"aws\" {\n resource \"aws_vpn_connection\" \"vpn_connection\" {\n customer_gateway_id = \"dummy-customer-gateway-id\"\n type = \"ipsec.1\"\n+ vpn_gateway_id = \"vpn-gateway-id\"\n }\n \n resource \"aws_vpn_connection\" \"transit\" {\n@@ -23,10 +24,11 @@ resource \"aws_vpn_connection\" \"transit\" {\n resource \"aws_vpn_connection\" \"vpn_connection_withUsage\" {\n customer_gateway_id = \"dummy-customer-gateway-id2\"\n type = \"ipsec.1\"\n+ vpn_gateway_id = \"vpn-gateway-id\"\n }\n \n resource \"aws_vpn_connection\" \"transit_withUsage\" {\n customer_gateway_id = \"dummy-customer-gateway-id2\"\n type = \"ipsec.1\"\n transit_gateway_id = \"dummy-transit-gateway-id2\"\n-}\n\\ No newline at end of file\n+}\n", "diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml\nindex 7e42967..77e4abf 100644\n--- a/.github/workflows/frontend.yaml\n+++ b/.github/workflows/frontend.yaml\n@@ -22,26 +22,22 @@ jobs:\n ${{ runner.OS }}-build-\n ${{ runner.OS }}-\n \n+ - uses: azure/k8s-set-context@v1\n+ with:\n+ method: kubeconfig\n+ kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.\n+ id: setcontext\n - name: Install\n run: npm install\n \n- - name: Build\n- run: npm run build:staging\n- env:\n- ENVIRONMENT: staging\n-\n- - name: Deploy\n- env:\n- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}\n- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}\n- AWS_REGION: eu-central-1\n- AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}\n+ - name: Build and deploy\n run: |\n- aws configure set default.s3.signature_version s3v4\n- aws --endpoint-url https://${{secrets.DOMAIN_NAME}}/frontend/ s3 cp \\\n- --recursive \\\n- --region \"$AWS_REGION\" \\\n- public s3://$AWS_S3_BUCKET_NAME\n+ cd frontend\n+ bash build.sh\n+ cp -arl public frontend\n+ minio_pod=$(kubectl get po -n db -l app.kubernetes.io/name=minio -n db --output custom-columns=name:.metadata.name | tail -n+2)\n+ kubectl -n db cp frontend $minio_pod:/data/\n+ rm -rf frontend\n \n # - name: Debug Job\n # if: ${{ failure() }}\n", "diff --git a/package.json b/package.json\nindex c8051d2..b0a97fb 100644\n--- a/package.json\n+++ b/package.json\n@@ -60,6 +60,7 @@\n \"babel-cli\": \"^6.16.0\",\n \"babel-core\": \"^6.16.0\",\n \"babel-eslint\": \"^7.0.0\",\n+ \"babel-loader\": \"^6.2.5\",\n \"babel-plugin-transform-class-properties\": \"^6.10.2\",\n \"babel-plugin-transform-flow-strip-types\": \"^6.14.0\",\n 
\"babel-preset-es2015-node6\": \"^0.3.0\",\n@@ -82,6 +83,7 @@\n \"eslint-plugin-react\": \"^6.3.0\",\n \"flow-bin\": \"^0.33.0\",\n \"jsdom\": \"^9.4.2\",\n+ \"json-loader\": \"^0.5.4\",\n \"jsx-chai\": \"^4.0.0\",\n \"mocha\": \"^3.0.2\",\n \"mock-require\": \"^1.3.0\",\n@@ -91,6 +93,8 @@\n \"rimraf\": \"^2.5.2\",\n \"sinon\": \"^1.17.6\",\n \"sinon-chai\": \"^2.8.0\",\n- \"watch\": \"^1.0.0\"\n+ \"source-map-support\": \"^0.4.3\",\n+ \"watch\": \"^1.0.0\",\n+ \"webpack\": \"^1.13.2\"\n }\n }\ndiff --git a/webpack.config.js b/webpack.config.js\nnew file mode 100644\nindex 0000000..0ca6da1\n--- /dev/null\n+++ b/webpack.config.js\n@@ -0,0 +1,44 @@\n+const webpack = require('webpack');\n+const path = require('path');\n+const fs = require('fs');\n+\n+const nodeModules = {\n+ zmq: 'commonjs zmq',\n+ jmp: 'commonjs jmp',\n+ github: 'commonjs github',\n+};\n+\n+module.exports = {\n+ entry: './src/notebook/index.js',\n+ target: 'electron-renderer',\n+ output: {\n+ path: path.join(__dirname, 'app', 'build'),\n+ filename: 'webpacked-notebook.js'\n+ },\n+ module: {\n+ loaders: [\n+ { test: /\\.js$/, exclude: /node_modules/, loaders: ['babel'] },\n+ { test: /\\.json$/, loader: 'json-loader' },\n+ ]\n+ },\n+ resolve: {\n+ extensions: ['', '.js', '.jsx'],\n+ root: path.join(__dirname, 'app'),\n+ // Webpack 1\n+ modulesDirectories: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ path.resolve(__dirname, 'node_modules'),\n+ ],\n+ // Webpack 2\n+ modules: [\n+ path.resolve(__dirname, 'app', 'node_modules'),\n+ ],\n+ },\n+ externals: nodeModules,\n+ plugins: [\n+ new webpack.IgnorePlugin(/\\.(css|less)$/),\n+ new webpack.BannerPlugin('require(\"source-map-support\").install();',\n+ { raw: true, entryOnly: false })\n+ ],\n+ devtool: 'sourcemap'\n+};\n"]
5
["8d53d724275ebe4b2a0bb0bd7e2c2dfc399e049b", "06960183db42cba1b1f1a8077660ba8c801c9e18", "9b059dd8245e72f0bf8c40fc633f9ef6fccae405", "3f2eec37f76c1ad9408e423e49fe5bfe3e17d943", "4ab28fc2e63e975a0c77e18ae644f34fa5f8771a"]
["refactor", "fix", "test", "ci", "build"]
rename step,coordinator accepts a request transformer instead of a list of operations The request transformer can generate the operations from the current topology. This helps to - ensure that the operations are generated based on the latest topology. When concurrent changes happen, the coordinator can detect it. Previously it was unclear because by the time the apply operations were handled, the cluster topology might have changed. - return the simulated final topology as part of the result,baby go nyoom,simplify statement,selenium java 4.8.1
["diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex e81d897..5c3ee6b 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -45,7 +45,7 @@ jobs:\n - name: Install dependencies\n run: pnpm install\n \n- - name: Typecheck\n+ - name: Build (stub)\n run: pnpm build:stub\n \n - name: Typecheck\n", "diff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\nindex 8bb5c3d..f8f5e24 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinator.java\n@@ -10,6 +10,7 @@ package io.camunda.zeebe.topology.changes;\n import io.camunda.zeebe.scheduler.future.ActorFuture;\n import io.camunda.zeebe.topology.state.ClusterTopology;\n import io.camunda.zeebe.topology.state.TopologyChangeOperation;\n+import io.camunda.zeebe.util.Either;\n import java.util.List;\n \n public interface TopologyChangeCoordinator {\n@@ -39,4 +40,16 @@ public interface TopologyChangeCoordinator {\n ActorFuture<Boolean> hasCompletedChanges(final long version);\n \n ActorFuture<ClusterTopology> getCurrentTopology();\n+\n+ ActorFuture<TopologyChangeResult> applyOperations(TopologyChangeRequest request);\n+\n+ record TopologyChangeResult(\n+ ClusterTopology currentTopology,\n+ ClusterTopology finalTopology,\n+ List<TopologyChangeOperation> operations) {}\n+\n+ interface TopologyChangeRequest {\n+ Either<Exception, List<TopologyChangeOperation>> operations(\n+ final ClusterTopology currentTopology);\n+ }\n }\ndiff --git a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\nindex 13ec754..877fc3c 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/changes/TopologyChangeCoordinatorImpl.java\n@@ -103,6 +103,62 @@ public class TopologyChangeCoordinatorImpl implements TopologyChangeCoordinator \n return clusterTopologyManager.getClusterTopology();\n }\n \n+ @Override\n+ public ActorFuture<TopologyChangeResult> applyOperations(final TopologyChangeRequest request) {\n+ final ActorFuture<TopologyChangeResult> future = executor.createFuture();\n+ clusterTopologyManager\n+ .getClusterTopology()\n+ .onComplete(\n+ (currentClusterTopology, errorOnGettingTopology) -> {\n+ if (errorOnGettingTopology != null) {\n+ future.completeExceptionally(errorOnGettingTopology);\n+ return;\n+ }\n+\n+ final var operationsEither = request.operations(currentClusterTopology);\n+ if (operationsEither.isLeft()) {\n+ future.completeExceptionally(operationsEither.getLeft());\n+ return;\n+ }\n+ final var operations = operationsEither.get();\n+ if (operations.isEmpty()) {\n+ // No operations to apply\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, currentClusterTopology, operations));\n+ return;\n+ }\n+\n+ final ActorFuture<ClusterTopology> validation =\n+ validateTopologyChangeRequest(currentClusterTopology, operations);\n+\n+ validation.onComplete(\n+ (simulatedFinalTopology, validationError) -> {\n+ if (validationError != null) {\n+ future.completeExceptionally(validationError);\n+ return;\n+ }\n+\n+ // if the validation was successful, apply the changes\n+ final ActorFuture<ClusterTopology> applyFuture = 
executor.createFuture();\n+ applyTopologyChange(\n+ operations, currentClusterTopology, simulatedFinalTopology, applyFuture);\n+\n+ applyFuture.onComplete(\n+ (ignore, error) -> {\n+ if (error == null) {\n+ future.complete(\n+ new TopologyChangeResult(\n+ currentClusterTopology, simulatedFinalTopology, operations));\n+ } else {\n+ future.completeExceptionally(error);\n+ }\n+ });\n+ });\n+ });\n+ return future;\n+ }\n+\n private ActorFuture<ClusterTopology> validateTopologyChangeRequest(\n final ClusterTopology currentClusterTopology,\n final List<TopologyChangeOperation> operations) {\n", "diff --git a/README.md b/README.md\nindex 38ce46d..9140b16 100644\n--- a/README.md\n+++ b/README.md\n@@ -39,7 +39,7 @@ Million makes creating user interfaces as easy as [React](https://reactjs.org), \n \n While alternative libraries like [Preact](https://preactjs.com/) reduce bundle sizes by efficient code design, Million takes it a step further by **leveraging compilation** to make a quantum leap in improving bundle size **and** render speed.\n \n-Think of it as if [React](https://preactjs.com/)'s API and [Svelte](https://svelte.dev/)'s compiler had a baby. [A baby with _**super speed! \ud83d\udc76**_](https://millionjs.org/benchmarks)\n+Think of it as if [React](https://preactjs.com/)'s API and [Svelte](https://svelte.dev/)'s compiler had a baby. [A baby with _**super speed! \ud83d\udc76\ud83d\ude80**_](https://millionjs.org/benchmarks)\n \n ### [**\ud83d\udcda Learn Million in 1 minute! \u2192**](https://millionjs.org/docs/start-here)\n \n", "diff --git a/src/Object/Merge.ts b/src/Object/Merge.ts\nindex 1f48efb..06caad1 100644\n--- a/src/Object/Merge.ts\n+++ b/src/Object/Merge.ts\n@@ -96,9 +96,11 @@ type ChooseMergeDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _MergeDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? MergeProp<O, O1, K, OOK, style>\n- : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? MergeProp<O, O1, K, OOK, style>\n+ : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\ndiff --git a/src/Object/Patch.ts b/src/Object/Patch.ts\nindex 2d73784..2c8bd42 100644\n--- a/src/Object/Patch.ts\n+++ b/src/Object/Patch.ts\n@@ -89,9 +89,11 @@ type ChoosePatchDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _PatchDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? PatchProp<O, O1, K, OOK>\n- : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? 
PatchProp<O, O1, K, OOK>\n+ : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\n", "diff --git a/pom.xml b/pom.xml\nindex f792f3c..477224a 100644\n--- a/pom.xml\n+++ b/pom.xml\n@@ -60,8 +60,8 @@\n <codehaus-groovy.version>3.0.11</codehaus-groovy.version>\n <jython.version>2.7.0</jython.version>\n <docker-java.version>3.2.14</docker-java.version>\n- <selenium.version>4.8.0</selenium.version>\n- <jmeter-plugins-webdriver.version>4.8.0</jmeter-plugins-webdriver.version>\n+ <selenium.version>4.8.1</selenium.version>\n+ <jmeter-plugins-webdriver.version>4.8.1</jmeter-plugins-webdriver.version>\n <opentelemetry.version>1.22.0</opentelemetry.version>\n <oracle-database.version>19.7.0.0</oracle-database.version>\n <zookeeper.version>3.8.0</zookeeper.version>\n"]
5
["34875bc0e59b43d9041903101c823d25ec194a21", "dec860436916ef216998f80f8b2f9c39d00c064d", "eb07f64394ab2387f50a30d22667f9b5d0eca6c8", "f86944ff00b970d7e2da48abbff43e58bdf29b99", "66f907f2d6ff0956bb5215518678bc79cab83c17"]
["ci", "feat", "docs", "refactor", "build"]
remove broken link Fixes #1785,Fix windows build,disable edit/delete if primary key missing Signed-off-by: Pranav C <[email protected]>,permission check,add workflow to release branches
["diff --git a/docs/content/Caching/Caching.md b/docs/content/Caching/Caching.md\nindex d873a52..9706dda 100644\n--- a/docs/content/Caching/Caching.md\n+++ b/docs/content/Caching/Caching.md\n@@ -135,8 +135,9 @@ If nothing is found in the cache, the query is executed in the database and the \n is returned as well as updating the cache.\n \n If an existing value is present in the cache and the `refreshKey` value for\n-the query hasn't changed, the cached value will be returned. Otherwise, a\n-[query renewal](#in-memory-cache-force-query-renewal) will be performed.\n+the query hasn't changed, the cached value will be returned. Otherwise, a SQL query will be executed either against the pre-aggregations storage or the source database to populate the cache with the results and return them.\n+\n+\n \n ### Refresh Keys\n \n", "diff --git a/src/fs/mounts/mod.rs b/src/fs/mounts/mod.rs\nindex a7f8188..662e2f5 100644\n--- a/src/fs/mounts/mod.rs\n+++ b/src/fs/mounts/mod.rs\n@@ -29,11 +29,14 @@ impl std::error::Error for Error {}\n \n impl std::fmt::Display for Error {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n+ // Allow unreachable_patterns for windows build\n+ #[allow(unreachable_patterns)]\n match self {\n #[cfg(target_os = \"macos\")]\n Error::GetFSStatError(err) => write!(f, \"getfsstat failed: {err}\"),\n #[cfg(target_os = \"linux\")]\n- Error::IOError(err) => write!(f, \"failed to read /proc/mounts: {err}\")\n+ Error::IOError(err) => write!(f, \"failed to read /proc/mounts: {err}\"),\n+ _ => write!(f, \"Unknown error\"),\n }\n }\n }\n\\ No newline at end of file\ndiff --git a/src/main.rs b/src/main.rs\nindex 483e14d..ca28081 100644\n--- a/src/main.rs\n+++ b/src/main.rs\n@@ -62,6 +62,8 @@ mod theme;\n // to `clap` is complete.\n lazy_static! 
{\n static ref ALL_MOUNTS: HashMap<PathBuf, mounts::MountedFs> = {\n+ // Allow unused_mut for windows\n+ #[allow(unused_mut)]\n let mut mount_map: HashMap<PathBuf, mounts::MountedFs> = HashMap::new();\n \n #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n", "diff --git a/packages/nc-gui/components/project/spreadsheet/components/expandedForm.vue b/packages/nc-gui/components/project/spreadsheet/components/expandedForm.vue\nindex 5f9841f..c414c8c 100644\n--- a/packages/nc-gui/components/project/spreadsheet/components/expandedForm.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/components/expandedForm.vue\n@@ -413,6 +413,9 @@ export default {\n \n await this.reload()\n } else if (Object.keys(updatedObj).length) {\n+ if (!id) {\n+ return this.$toast.info('Update not allowed for table which doesn\\'t have primary Key').goAway(3000)\n+ }\n await this.api.update(id, updatedObj, this.oldRow)\n } else {\n return this.$toast.info('No columns to update').goAway(3000)\ndiff --git a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\nindex c2b4b81..1b9d6a0 100644\n--- a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n@@ -62,7 +62,15 @@\n <v-spacer class=\"h-100\" @dblclick=\"debug=true\" />\n \n <debug-metas v-if=\"debug\" class=\"mr-3\" />\n-\n+ <v-tooltip bottom>\n+ <template #activator=\"{on}\">\n+ <v-icon v-if=\"!isPkAvail\" color=\"warning\" small class=\"mr-3\" v-on=\"on\">\n+ mdi-information-outline\n+ </v-icon>\n+ </template>\n+ <span class=\"caption\"> Update & Delete not allowed since the table doesn't have any primary key\n+ </span>\n+ </v-tooltip>\n <lock-menu v-if=\"_isUIAllowed('view-type')\" v-model=\"viewStatus.type\" />\n <x-btn tooltip=\"Reload view data\" outlined small text @click=\"reload\">\n <v-icon small class=\"mr-1\" color=\"grey darken-3\">\n@@ -208,6 +216,7 @@\n :meta=\"meta\"\n :is-virtual=\"selectedView.type === 'vtable'\"\n :api=\"api\"\n+ :is-pk-avail=\"isPkAvail\"\n @onNewColCreation=\"onNewColCreation\"\n @onCellValueChange=\"onCellValueChange\"\n @insertNewRow=\"insertNewRow\"\n@@ -631,8 +640,8 @@ export default {\n if (\n !this.meta || (\n (this.meta.hasMany && this.meta.hasMany.length) ||\n- (this.meta.manyToMany && this.meta.manyToMany.length) ||\n- (this.meta.belongsTo && this.meta.belongsTo.length))\n+ (this.meta.manyToMany && this.meta.manyToMany.length) ||\n+ (this.meta.belongsTo && this.meta.belongsTo.length))\n ) {\n return this.$toast.info('Please delete relations before deleting table.').goAway(3000)\n }\n@@ -817,6 +826,10 @@ export default {\n \n const id = this.meta.columns.filter(c => c.pk).map(c => rowObj[c._cn]).join('___')\n \n+ if (!id) {\n+ return this.$toast.info('Update not allowed for table which doesn\\'t have primary Key').goAway(3000)\n+ }\n+\n const newData = await this.api.update(id, {\n [column._cn]: rowObj[column._cn]\n }, { [column._cn]: oldRow[column._cn] })\n@@ -841,6 +854,11 @@ export default {\n const rowObj = this.rowContextMenu.row\n if (!this.rowContextMenu.rowMeta.new) {\n const id = this.meta && this.meta.columns && this.meta.columns.filter(c => c.pk).map(c => rowObj[c._cn]).join('___')\n+\n+ if (!id) {\n+ return this.$toast.info('Delete not allowed for table which doesn\\'t have primary Key').goAway(3000)\n+ }\n+\n await this.api.delete(id)\n }\n this.data.splice(this.rowContextMenu.index, 1)\n@@ -859,6 +877,11 @@ export default {\n 
}\n if (!rowMeta.new) {\n const id = this.meta.columns.filter(c => c.pk).map(c => rowObj[c._cn]).join('___')\n+\n+ if (!id) {\n+ return this.$toast.info('Delete not allowed for table which doesn\\'t have primary Key').goAway(3000)\n+ }\n+\n await this.api.delete(id)\n }\n this.data.splice(row, 1)\n@@ -991,6 +1014,9 @@ export default {\n }\n },\n computed: {\n+ isPkAvail() {\n+ return this.meta && this.meta.columns.some(c => c.pk)\n+ },\n isGallery() {\n return this.selectedView && this.selectedView.show_as === 'gallery'\n },\ndiff --git a/packages/nc-gui/components/project/spreadsheet/views/xcGridView.vue b/packages/nc-gui/components/project/spreadsheet/views/xcGridView.vue\nindex 5497d05..c198784 100644\n--- a/packages/nc-gui/components/project/spreadsheet/views/xcGridView.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/views/xcGridView.vue\n@@ -27,7 +27,7 @@\n @xcresized=\"resizingCol = null\"\n >\n <!-- :style=\"columnsWidth[col._cn] ? `min-width:${columnsWidth[col._cn]}; max-width:${columnsWidth[col._cn]}` : ''\"\n- -->\n+-->\n \n <virtual-header-cell\n v-if=\"col.virtual\"\n@@ -136,13 +136,13 @@\n :key=\"row + columnObj.alias\"\n class=\"cell pointer\"\n :class=\"{\n- 'active' : !isPublicView && selected.col === col && selected.row === row && isEditable ,\n+ 'active' :!isPublicView && selected.col === col && selected.row === row && isEditable ,\n 'primary-column' : primaryValueColumn === columnObj._cn,\n 'text-center': isCentrallyAligned(columnObj),\n 'required': isRequired(columnObj,rowObj)\n }\"\n :data-col=\"columnObj.alias\"\n- @dblclick=\"makeEditable(col,row,columnObj.ai)\"\n+ @dblclick=\"makeEditable(col,row,columnObj.ai,rowMeta)\"\n @click=\"makeSelected(col,row);\"\n @contextmenu=\"showRowContextMenu($event,rowObj,rowMeta,row,col, columnObj)\"\n >\n@@ -162,7 +162,8 @@\n \n <editable-cell\n v-else-if=\"\n- !isLocked\n+ (isPkAvail ||rowMeta.new) &&\n+ !isLocked\n && !isPublicView\n && (editEnabled.col === col && editEnabled.row === row)\n || enableEditable(columnObj)\n@@ -190,11 +191,11 @@\n :db-alias=\"nodes.dbAlias\"\n :value=\"rowObj[columnObj._cn]\"\n :sql-ui=\"sqlUi\"\n- @enableedit=\"makeSelected(col,row);makeEditable(col,row,columnObj.ai)\"\n+ @enableedit=\"makeSelected(col,row);makeEditable(col,row,columnObj.ai, rowMeta)\"\n />\n </td>\n </tr>\n- <tr v-if=\"!isLocked && !isPublicView && isEditable && relationType !== 'bt'\">\n+ <tr v-if=\"isPkAvail && !isLocked && !isPublicView && isEditable && relationType !== 'bt'\">\n <td :colspan=\"visibleColLength + 1\" class=\"text-left pointer\" @click=\"insertNewRow(true)\">\n <v-tooltip top>\n <template #activator=\"{on}\">\n@@ -214,7 +215,9 @@\n <!-- <div is=\"style\" v-html=\"resizeColStyle\" />-->\n <dynamic-style>\n <template v-if=\"resizingCol\">\n- [data-col=\"{{ resizingCol }}\"]{min-width:{{ resizingColWidth }};max-width:{{ resizingColWidth }};width:{{ resizingColWidth }};}\n+ [data-col=\"{{ resizingCol }}\"]{min-width:{{ resizingColWidth }};max-width:{{\n+ resizingColWidth\n+ }};width:{{ resizingColWidth }};}\n </template>\n </dynamic-style>\n </div>\n@@ -261,7 +264,8 @@ export default {\n table: String,\n isVirtual: Boolean,\n isLocked: Boolean,\n- columnsWidth: { type: Object }\n+ columnsWidth: { type: Object },\n+ isPkAvail: Boolean\n },\n data: () => ({\n resizingCol: null,\n@@ -426,6 +430,10 @@ export default {\n return\n }\n if (e.key && e.key.length === 1) {\n+ if (!this.isPkAvail && !this.data[this.selected.row].rowMeta.new) {\n+ return this.$toast.info('Update not allowed for table which doesn\\'t 
have primary Key').goAway(3000)\n+ }\n+\n this.$set(this.data[this.selected.row].row, this.availableColumns[this.selected.col]._cn, '')\n this.editEnabled = { ...this.selected }\n }\n@@ -466,10 +474,14 @@ export default {\n this.editEnabled = {}\n }\n },\n- makeEditable(col, row) {\n+ makeEditable(col, row, _, rowMeta) {\n if (this.isPublicView || !this.isEditable) {\n return\n }\n+\n+ if (!this.isPkAvail && !rowMeta.new) {\n+ return this.$toast.info('Update not allowed for table which doesn\\'t have primary Key').goAway(3000)\n+ }\n if (this.availableColumns[col].ai) {\n return this.$toast.info('Auto Increment field is not editable').goAway(3000)\n }\n", "diff --git a/server/src/routes/course/index.ts b/server/src/routes/course/index.ts\nindex 557f5fb..bc0e490 100644\n--- a/server/src/routes/course/index.ts\n+++ b/server/src/routes/course/index.ts\n@@ -209,7 +209,7 @@ function addStudentApi(router: Router, logger: ILogger) {\n router.post('/student/:githubId/status', ...mentorValidators, updateStudentStatus(logger));\n router.post('/student/:githubId/status-self', courseGuard, selfUpdateStudentStatus(logger));\n router.get('/student/:githubId/score', courseGuard, getScoreByStudent(logger));\n- router.post('/student/:githubId/certificate', courseManagerGuard, ...validators, postStudentCertificate(logger));\n+ router.post('/student/:githubId/certificate', courseManagerGuard, validateGithubId, postStudentCertificate(logger));\n \n router.get('/students', courseSupervisorGuard, getStudents(logger));\n router.get('/students/csv', courseSupervisorGuard, getStudentsCsv(logger));\n", "diff --git a/.github/workflows/release-pr.yml b/.github/workflows/release-pr.yml\nnew file mode 100644\nindex 0000000..697ca8e\n--- /dev/null\n+++ b/.github/workflows/release-pr.yml\n@@ -0,0 +1,48 @@\n+name: release\n+\n+on:\n+ issue_comment:\n+ types: [created]\n+ contains: \"/trigger release\"\n+\n+env:\n+ # 7 GiB by default on GitHub, setting to 6 GiB\n+ NODE_OPTIONS: --max-old-space-size=6144\n+\n+jobs:\n+ release-pr:\n+ permissions:\n+ id-token: write\n+ runs-on: ubuntu-latest\n+ timeout-minutes: 20\n+\n+ steps:\n+ - name: Ensure action is by maintainer\n+ uses: octokit/[email protected]\n+ id: check_role\n+ with:\n+ route: GET /repos/danielroe/roe.dev/collaborators/${{ github.event.comment.user.login }}\n+ env:\n+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n+\n+ - uses: actions/checkout@v3\n+ with:\n+ fetch-depth: 0\n+\n+ - run: corepack enable\n+ - uses: actions/setup-node@v3\n+ with:\n+ node-version: 20\n+ cache: \"pnpm\"\n+\n+ - name: Install dependencies\n+ run: pnpm install\n+\n+ - name: Build\n+ run: pnpm build\n+\n+ - name: Release Edge\n+ run: ./scripts/release-edge.sh\n+ env:\n+ NODE_AUTH_TOKEN: ${{ secrets.NODE_AUTH_TOKEN }}\n+ NPM_CONFIG_PROVENANCE: true\ndiff --git a/package.json b/package.json\nindex 1074dcd..48bb566 100644\n--- a/package.json\n+++ b/package.json\n@@ -5,7 +5,7 @@\n \"license\": \"MIT\",\n \"type\": \"module\",\n \"scripts\": {\n- \"build\": \"FORCE_COLOR=1 pnpm --filter './packages/**' prepack\",\n+ \"build\": \"pnpm --filter './packages/**' prepack\",\n \"build:stub\": \"pnpm --filter './packages/**' prepack --stub\",\n \"cleanup\": \"rimraf 'packages/**/node_modules' 'examples/**/node_modules' 'docs/node_modules' 'playground/node_modules' 'node_modules'\",\n \"dev\": \"pnpm play\",\n"]
5
["c351088bce98594c740a39546ce3655c91554a5d", "81ca000c6a7e7435809081c60be37dda23458ec8", "4d92f352741b04c8709319dfe5c8419654f3682c", "33c25b2f59c931a7f4af994365522221a7821dca", "bc28d536c0dd1061ac96cea0241857c1d4e4e0f2"]
["docs", "build", "feat", "fix", "ci"]
retry uploading pdb files on appveyor (#21561),add documentation to use react-native-paper with CRA (#874),add method to extract snapshot name from filename also corrected pattern, where the period was meant to match a period, not any character. related to zeebe-io/zeebe#876,add test case with multiple partitions for message,add prewatch script to core
["diff --git a/appveyor.yml b/appveyor.yml\nindex 9aca21e..8b54543 100644\n--- a/appveyor.yml\n+++ b/appveyor.yml\n@@ -146,12 +146,12 @@ build_script:\n - ps: >-\n if ($env:GN_CONFIG -eq 'release') {\n python electron\\script\\zip-symbols.py\n- appveyor PushArtifact out/Default/symbols.zip\n+ appveyor-retry appveyor PushArtifact out/Default/symbols.zip\n } else {\n # It's useful to have pdb files when debugging testing builds that are\n # built on CI.\n 7z a pdb.zip out\\Default\\*.pdb\n- appveyor PushArtifact pdb.zip\n+ appveyor-retry appveyor PushArtifact pdb.zip\n }\n - python electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.win.%TARGET_ARCH%.manifest\n test_script:\n", "diff --git a/docs/pages/4.react-native-web.md b/docs/pages/4.react-native-web.md\nindex 69e4e52..8d6ae2a 100644\n--- a/docs/pages/4.react-native-web.md\n+++ b/docs/pages/4.react-native-web.md\n@@ -16,6 +16,63 @@ To install `react-native-web`, run:\n yarn add react-native-web react-dom react-art\n ```\n \n+### Using CRA ([Create React App](https://github.com/facebook/create-react-app))\n+\n+Install [`react-app-rewired`](https://github.com/timarney/react-app-rewired) to override `webpack` configuration:\n+\n+```sh\n+yarn add --dev react-app-rewired\n+```\n+\n+[Configure `babel-loader`](#2-configure-babel-loader) using a new file called `config-overrides.js`:\n+\n+```js\n+module.exports = function override(config, env) {\n+ config.module.rules.push({\n+ test: /\\.js$/,\n+ exclude: /node_modules[/\\\\](?!react-native-paper|react-native-vector-icons|react-native-safe-area-view)/,\n+ use: {\n+ loader: \"babel-loader\",\n+ options: {\n+ // Disable reading babel configuration\n+ babelrc: false,\n+ configFile: false,\n+\n+ // The configration for compilation\n+ presets: [\n+ [\"@babel/preset-env\", { useBuiltIns: \"usage\" }],\n+ \"@babel/preset-react\",\n+ \"@babel/preset-flow\"\n+ ],\n+ plugins: [\n+ \"@babel/plugin-proposal-class-properties\",\n+ \"@babel/plugin-proposal-object-rest-spread\"\n+ ]\n+ }\n+ }\n+ });\n+\n+ return config;\n+};\n+```\n+\n+Change your script in `package.json`:\n+\n+```diff\n+/* package.json */\n+\n+ \"scripts\": {\n+- \"start\": \"react-scripts start\",\n++ \"start\": \"react-app-rewired start\",\n+- \"build\": \"react-scripts build\",\n++ \"build\": \"react-app-rewired build\",\n+- \"test\": \"react-scripts test --env=jsdom\",\n++ \"test\": \"react-app-rewired test --env=jsdom\"\n+}\n+```\n+\n+### Custom webpack setup\n+\n To install `webpack`, run:\n \n ```sh\n", "diff --git a/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java b/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\nindex d8f4d89..e54e85a 100644\n--- a/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\n+++ b/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\n@@ -23,8 +23,9 @@ public class FsSnapshotStorageConfiguration\n {\n protected static final String CHECKSUM_ALGORITHM = \"SHA1\";\n \n- protected static final String SNAPSHOT_FILE_NAME_TEMPLATE = \"%s\" + File.separatorChar + \"%s-%d.snapshot\";\n- protected static final String SNAPSHOT_FILE_NAME_PATTERN = \"%s-(\\\\d+).snapshot\";\n+ protected static final String SNAPSHOT_FILE_NAME_TEMPLATE = \"%s-%d.snapshot\";\n+ protected static final String SNAPSHOT_FILE_PATH_TEMPLATE = \"%s\" + File.separatorChar + 
SNAPSHOT_FILE_NAME_TEMPLATE;\n+ protected static final String SNAPSHOT_FILE_NAME_PATTERN = \"%s-(\\\\d+)\\\\.snapshot\";\n \n protected static final String CHECKSUM_FILE_NAME_TEMPLATE = \"%s\" + File.separatorChar + \"%s-%d.\" + CHECKSUM_ALGORITHM.toLowerCase();\n \n@@ -50,7 +51,7 @@ public class FsSnapshotStorageConfiguration\n \n public String snapshotFileName(String name, long logPosition)\n {\n- return String.format(SNAPSHOT_FILE_NAME_TEMPLATE, rootPath, name, logPosition);\n+ return String.format(SNAPSHOT_FILE_PATH_TEMPLATE, rootPath, name, logPosition);\n }\n \n public String checksumFileName(String name, long logPosition)\n@@ -86,7 +87,7 @@ public class FsSnapshotStorageConfiguration\n return String.format(CHECKSUM_CONTENT_TEMPLATE, checksum, dataFileName);\n }\n \n- public String extractDigetsFromChecksumContent(String content)\n+ public String extractDigestFromChecksumContent(String content)\n {\n final int indexOfSeparator = content.indexOf(CHECKSUM_CONTENT_SEPARATOR);\n if (indexOfSeparator < 0)\n@@ -108,9 +109,18 @@ public class FsSnapshotStorageConfiguration\n return content.substring(indexOfSeparator + CHECKSUM_CONTENT_SEPARATOR.length());\n }\n \n+ public String getSnapshotNameFromFileName(final String fileName)\n+ {\n+ final String suffixPattern = String.format(SNAPSHOT_FILE_NAME_PATTERN, \"\");\n+ final Pattern pattern = Pattern.compile(suffixPattern);\n+ final String[] parts = pattern.split(fileName);\n+\n+ return parts[0];\n+ }\n+\n public String getSnapshotFileNameTemplate()\n {\n- return SNAPSHOT_FILE_NAME_TEMPLATE;\n+ return SNAPSHOT_FILE_PATH_TEMPLATE;\n }\n \n public String getChecksumFileNameTemplate()\n", "diff --git a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\nindex 693d1da..e3552d4 100644\n--- a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n+++ b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n@@ -74,7 +74,7 @@ public class SubscriptionCommandSender {\n new CloseWorkflowInstanceSubscriptionCommand();\n \n private final ClientTransport subscriptionClient;\n- private final IntArrayList partitionIds;\n+ private final IntArrayList partitionIds = new IntArrayList();\n \n private int partitionId;\n private TopologyPartitionListenerImpl partitionListener;\n@@ -82,7 +82,6 @@ public class SubscriptionCommandSender {\n public SubscriptionCommandSender(\n final ClusterCfg clusterCfg, final ClientTransport subscriptionClient) {\n this.subscriptionClient = subscriptionClient;\n- partitionIds = new IntArrayList();\n partitionIds.addAll(clusterCfg.getPartitionIds());\n }\n \n@@ -100,7 +99,8 @@ public class SubscriptionCommandSender {\n final DirectBuffer messageName,\n final DirectBuffer correlationKey) {\n \n- final int subscriptionPartitionId = getSubscriptionPartitionId(correlationKey);\n+ final int subscriptionPartitionId =\n+ SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n \n openMessageSubscriptionCommand.setSubscriptionPartitionId(subscriptionPartitionId);\n openMessageSubscriptionCommand.setWorkflowInstanceKey(workflowInstanceKey);\n@@ -111,14 +111,6 @@ public class SubscriptionCommandSender {\n return sendSubscriptionCommand(subscriptionPartitionId, openMessageSubscriptionCommand);\n }\n \n- private int getSubscriptionPartitionId(final DirectBuffer correlationKey) {\n- if (partitionIds == 
null) {\n- throw new IllegalStateException(\"no partition ids available\");\n- }\n-\n- return SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n- }\n-\n public boolean openWorkflowInstanceSubscription(\n final long workflowInstanceKey,\n final long elementInstanceKey,\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\nindex 4baed4f..838c9ca 100644\n--- a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n@@ -36,7 +36,6 @@ import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.BpmnModelInstance;\n import io.zeebe.protocol.clientapi.RecordType;\n import io.zeebe.protocol.clientapi.ValueType;\n-import io.zeebe.protocol.impl.SubscriptionUtil;\n import io.zeebe.protocol.intent.DeploymentIntent;\n import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n@@ -44,7 +43,6 @@ import io.zeebe.protocol.intent.WorkflowInstanceSubscriptionIntent;\n import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n import io.zeebe.test.util.record.RecordingExporter;\n-import io.zeebe.util.buffer.BufferUtil;\n import java.util.List;\n import java.util.stream.Collectors;\n import org.agrona.DirectBuffer;\n@@ -171,39 +169,6 @@ public class MessageCatchElementTest {\n }\n \n @Test\n- public void shouldOpenMessageSubscriptionsOnSamePartition() {\n- // given\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n-\n- final String correlationKey = \"order-123\";\n-\n- final PartitionTestClient workflowPartition = apiRule.partitionClient(partitionIds.get(0));\n- final PartitionTestClient subscriptionPartition =\n- apiRule.partitionClient(getPartitionId(correlationKey));\n-\n- testClient.deploy(CATCH_EVENT_WORKFLOW);\n-\n- // when\n- final long workflowInstanceKey1 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- final long workflowInstanceKey2 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- // then\n- final List<Record<MessageSubscriptionRecordValue>> subscriptions =\n- subscriptionPartition\n- .receiveMessageSubscriptions()\n- .withIntent(MessageSubscriptionIntent.OPENED)\n- .limit(2)\n- .collect(Collectors.toList());\n-\n- assertThat(subscriptions)\n- .extracting(s -> s.getValue().getWorkflowInstanceKey())\n- .contains(workflowInstanceKey1, workflowInstanceKey2);\n- }\n-\n- @Test\n public void shouldOpenWorkflowInstanceSubscription() {\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", \"order-123\"));\n@@ -352,10 +317,4 @@ public class MessageCatchElementTest {\n .exists())\n .isTrue();\n }\n-\n- private int getPartitionId(final String correlationKey) {\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n- return SubscriptionUtil.getSubscriptionPartitionId(\n- BufferUtil.wrapString(correlationKey), partitionIds.size());\n- }\n }\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 
0000000..cf8261a\n--- /dev/null\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,134 @@\n+/*\n+ * Zeebe Broker Core\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * This program is free software: you can redistribute it and/or modify\n+ * it under the terms of the GNU Affero General Public License as published by\n+ * the Free Software Foundation, either version 3 of the License, or\n+ * (at your option) any later version.\n+ *\n+ * This program is distributed in the hope that it will be useful,\n+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\n+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n+ * GNU Affero General Public License for more details.\n+ *\n+ * You should have received a copy of the GNU Affero General Public License\n+ * along with this program. If not, see <http://www.gnu.org/licenses/>.\n+ */\n+package io.zeebe.broker.workflow.message;\n+\n+import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static io.zeebe.test.util.MsgPackUtil.asMsgPack;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.impl.SubscriptionUtil;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n+import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import io.zeebe.util.buffer.BufferUtil;\n+import java.util.List;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = \"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"receive-message\")\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+\n+ public ClientApiRule apiRule = new ClientApiRule(brokerRule::getClientAddress);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(apiRule);\n+\n+ private PartitionTestClient testClient;\n+\n+ @Before\n+ public void init() {\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_0)).isEqualTo(0);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_1)).isEqualTo(1);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_2)).isEqualTo(2);\n+\n+ testClient = apiRule.partitionClient();\n+\n+ testClient.deploy(WORKFLOW);\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, 
asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ apiRule\n+ .partitionClient(0)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_0, asMsgPack(\"p\", \"p0\"));\n+ apiRule\n+ .partitionClient(1)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_1, asMsgPack(\"p\", \"p1\"));\n+ apiRule\n+ .partitionClient(2)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_2, asMsgPack(\"p\", \"p2\"));\n+\n+ // when\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ private int getPartitionId(final String correlationKey) {\n+ final List<Integer> partitionIds = apiRule.getPartitionIds();\n+ return SubscriptionUtil.getSubscriptionPartitionId(\n+ BufferUtil.wrapString(correlationKey), partitionIds.size());\n+ }\n+}\ndiff --git a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\nindex dac11a2..e2b8397 100644\n--- a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n+++ b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n@@ -329,6 +329,7 @@ public class PartitionTestClient {\n final String messageName, final String correlationKey, final byte[] payload, final long ttl) {\n return apiRule\n .createCmdRequest()\n+ .partitionId(partitionId)\n .type(ValueType.MESSAGE, MessageIntent.PUBLISH)\n .command()\n .put(\"name\", messageName)\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\nindex 9a122d9..b7db67e 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n@@ -619,14 +619,9 @@ public class BrokerReprocessingTest {\n }\n \n @Test\n- public void shouldCorrelateMessageAfterRestartIfEnteredBeforeA() throws Exception {\n+ public void shouldCorrelateMessageAfterRestartIfEnteredBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n final long workflowInstanceKey =\n startWorkflowInstance(PROCESS_ID, singletonMap(\"orderId\", 
\"order-123\"))\n@@ -658,12 +653,7 @@ public class BrokerReprocessingTest {\n @Test\n public void shouldCorrelateMessageAfterRestartIfPublishedBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n publishMessage(\"order canceled\", \"order-123\", singletonMap(\"foo\", \"bar\"));\n reprocessingTrigger.accept(this);\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\ndeleted file mode 100644\nindex c6a05fb..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\n+++ /dev/null\n@@ -1,176 +0,0 @@\n-/*\n- * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-import org.junit.runner.RunWith;\n-import org.junit.runners.Parameterized;\n-import org.junit.runners.Parameterized.Parameter;\n-import org.junit.runners.Parameterized.Parameters;\n-\n-@RunWith(Parameterized.class)\n-public class MessageCorrelationTest {\n-\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private static final BpmnModelInstance CATCH_EVENT_WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- private static final BpmnModelInstance RECEIVE_TASK_WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .receiveTask(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- @Parameter(0)\n- public String elementType;\n-\n- @Parameter(1)\n- public BpmnModelInstance workflow;\n-\n- 
@Parameters(name = \"{0}\")\n- public static final Object[][] parameters() {\n- return new Object[][] {\n- {\"intermediate message catch event\", CATCH_EVENT_WORKFLOW},\n- {\"receive task\", RECEIVE_TASK_WORKFLOW}\n- };\n- }\n-\n- @Before\n- public void init() {\n- final DeploymentEvent deploymentEvent =\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(workflow, \"wf.bpmn\")\n- .send()\n- .join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfEnteredBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"receive-message\");\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfPublishedBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageAndMergePayload() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .payload(Collections.singletonMap(\"foo\", \"bar\"))\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n-\n- assertElementCompleted(\n- \"wf\",\n- \"receive-message\",\n- (catchEventOccurredEvent) ->\n- assertThat(catchEventOccurredEvent.getPayloadAsMap())\n- .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\ndeleted file mode 100644\nindex 7845eec..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\n+++ /dev/null\n@@ -1,234 +0,0 @@\n-/*\n- * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import 
static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.assertThatThrownBy;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.ZeebeFuture;\n-import io.zeebe.client.api.clients.WorkflowClient;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.client.api.events.WorkflowInstanceEvent;\n-import io.zeebe.client.cmd.ClientException;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import java.time.Duration;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-\n-public class PublishMessageTest {\n-\n- private static final BpmnModelInstance WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"catch-event\")\n- .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .endEvent()\n- .done();\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private WorkflowClient workflowClient;\n-\n- @Before\n- public void init() {\n-\n- workflowClient = clientRule.getClient().workflowClient();\n-\n- final DeploymentEvent deploymentEvent =\n- workflowClient.newDeployCommand().addWorkflowModel(WORKFLOW, \"wf.bpmn\").send().join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageToAllSubscriptions() {\n- // given\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageWithZeroTTL() {\n- // given\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"catch-event\");\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ZERO)\n- .send()\n- .join();\n-\n- // then\n- assertElementCompleted(\"wf\", \"catch-event\");\n- }\n-\n- @Test\n- public void shouldNotCorrelateMessageAfterTTL() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- 
.timeToLive(Duration.ZERO)\n- .payload(Collections.singletonMap(\"msg\", \"failure\"))\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ofMinutes(1))\n- .payload(Collections.singletonMap(\"msg\", \"expected\"))\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n-\n- assertElementCompleted(\n- \"wf\",\n- \"catch-event\",\n- (catchEventOccurred) ->\n- assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", \"expected\")));\n- }\n-\n- @Test\n- public void shouldCorrelateMessageOnDifferentPartitions() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-124\")\n- .send()\n- .join();\n-\n- // when\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-124\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldRejectMessageWithSameId() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send()\n- .join();\n-\n- // when\n- final ZeebeFuture<Void> future =\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send();\n-\n- // then\n- assertThatThrownBy(future::join)\n- .isInstanceOf(ClientException.class)\n- .hasMessageContaining(\"message with id 'foo' is already published\");\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 0000000..0e37c95\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,196 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static 
io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.intent.MessageIntent;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import java.util.Collections;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = \"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent()\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldPublishMessageOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ publishMessage(CORRELATION_KEY_PARTITION_0, Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+ });\n+\n+ // then\n+ assertThat(RecordingExporter.messageRecords(MessageIntent.PUBLISHED).limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void 
shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ publishMessage(CORRELATION_KEY_PARTITION_0, Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+\n+ // when\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnSamePartitionsAfterRestart() {\n+ // given\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(15)\n+ .exists())\n+ .isTrue();\n+\n+ // when\n+ brokerRule.stopBroker();\n+ brokerRule.startBroker();\n+\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ private void createWorkflowInstance(Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+\n+ private void publishMessage(String correlationKey, Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"message\")\n+ .correlationKey(correlationKey)\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\nnew file mode 100644\nindex 0000000..3b08572\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\n@@ -0,0 +1,198 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is 
distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.assertThatThrownBy;\n+import static org.assertj.core.api.Assertions.entry;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.ZeebeFuture;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.client.cmd.ClientException;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import java.time.Duration;\n+import java.util.Collections;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationTest {\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"catch-event\")\n+ .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n+ .endEvent()\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessage() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .payload(Collections.singletonMap(\"foo\", \"bar\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertWorkflowInstanceCompleted(PROCESS_ID);\n+\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurredEvent) ->\n+ assertThat(catchEventOccurredEvent.getPayloadAsMap())\n+ .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageWithZeroTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ assertElementActivated(\"catch-event\");\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(PROCESS_ID, \"catch-event\");\n+ }\n+\n+ @Test\n+ public void 
shouldNotCorrelateMessageAfterTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .payload(Collections.singletonMap(\"msg\", \"failure\"))\n+ .send()\n+ .join();\n+\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ofMinutes(1))\n+ .payload(Collections.singletonMap(\"msg\", \"expected\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurred) ->\n+ assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", \"expected\")));\n+ }\n+\n+ @Test\n+ public void shouldRejectMessageWithSameId() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send()\n+ .join();\n+\n+ // when\n+ final ZeebeFuture<Void> future =\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send();\n+\n+ // then\n+ assertThatThrownBy(future::join)\n+ .isInstanceOf(ClientException.class)\n+ .hasMessageContaining(\"message with id 'foo' is already published\");\n+ }\n+}\n", "diff --git a/.gitignore b/.gitignore\nindex 3445558..72257cb 100644\n--- a/.gitignore\n+++ b/.gitignore\n@@ -1,4 +1,5 @@\n *.o\n+.env\n settheory\n constraint\n Main\ndiff --git a/packages/core/package.json b/packages/core/package.json\nindex 9ba8b93..79bd532 100644\n--- a/packages/core/package.json\n+++ b/packages/core/package.json\n@@ -14,6 +14,7 @@\n \"build:parsers\": \"nearleyc src/parser/Domain.ne > src/parser/DomainParser.ts && nearleyc src/parser/Substance.ne > src/parser/SubstanceParser.ts && nearleyc src/parser/Style.ne > src/parser/StyleParser.ts\",\n \"prebuild\": \"yarn build:parsers\",\n \"prestart\": \"yarn build:parsers\",\n+ \"prewatch\": \"yarn build:parsers\",\n \"test\": \"jest --watchAll=false\",\n \"test:watch\": \"jest --watchAll\",\n \"build\": \"rollup -c\",\n"]
5
["7152173d26293f4638920b17ce2dfa8ae995193b", "ee7cc5d5a940fba774e715b1f029c6361110b108", "7ab965c55d0e98fdb6179577d0db56599675e400", "2d416be63eeec9e7fdb90a62c40c8ad8f0672efa", "aa0152baa4376b1087c86499a7c289b668d5ad55"]
["ci", "docs", "feat", "test", "build"]
do not pin time in tests but only skip ahead related to #573,dedup redundant imports,remove unnecessary spotless definition It receives this already from the parent pom.,fix a few issues,add method to extract snapshot name from filename also corrected pattern, where the period was meant to match a period, not any character. related to zeebe-io/zeebe#876
["diff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\nindex 636cd21..76afff7 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRecoveryTest.java\n@@ -15,7 +15,9 @@\n */\n package io.zeebe.broker.it.startup;\n \n-import static io.zeebe.broker.it.util.TopicEventRecorder.*;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.incidentEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.taskEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.wfInstanceEvent;\n import static io.zeebe.test.util.TestUtil.doRepeatedly;\n import static io.zeebe.test.util.TestUtil.waitUntil;\n import static org.assertj.core.api.Assertions.assertThat;\n@@ -24,11 +26,18 @@ import java.io.File;\n import java.io.InputStream;\n import java.nio.charset.StandardCharsets;\n import java.time.Duration;\n-import java.time.Instant;\n import java.util.Collections;\n import java.util.List;\n import java.util.regex.Pattern;\n \n+import org.assertj.core.util.Files;\n+import org.junit.After;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.ExpectedException;\n+import org.junit.rules.RuleChain;\n+import org.junit.rules.TemporaryFolder;\n+\n import io.zeebe.broker.clustering.ClusterServiceNames;\n import io.zeebe.broker.it.ClientRule;\n import io.zeebe.broker.it.EmbeddedBrokerRule;\n@@ -38,7 +47,9 @@ import io.zeebe.client.ZeebeClient;\n import io.zeebe.client.clustering.impl.TopicLeader;\n import io.zeebe.client.clustering.impl.TopologyResponse;\n import io.zeebe.client.cmd.ClientCommandRejectedException;\n-import io.zeebe.client.event.*;\n+import io.zeebe.client.event.DeploymentEvent;\n+import io.zeebe.client.event.TaskEvent;\n+import io.zeebe.client.event.WorkflowInstanceEvent;\n import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.instance.WorkflowDefinition;\n import io.zeebe.raft.Raft;\n@@ -48,9 +59,6 @@ import io.zeebe.test.util.TestFileUtil;\n import io.zeebe.test.util.TestUtil;\n import io.zeebe.transport.SocketAddress;\n import io.zeebe.util.time.ClockUtil;\n-import org.assertj.core.util.Files;\n-import org.junit.*;\n-import org.junit.rules.*;\n \n public class BrokerRecoveryTest\n {\n@@ -360,17 +368,12 @@ public class BrokerRecoveryTest\n waitUntil(() -> !recordingTaskHandler.getHandledTasks().isEmpty());\n \n // when\n- restartBroker(() ->\n- {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n- });\n+ restartBroker(() -> ClockUtil.addTime(Duration.ofSeconds(60)));\n \n // wait until stream processor and scheduler process the lock task event which is not re-processed on recovery\n doRepeatedly(() ->\n {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n+ ClockUtil.addTime(Duration.ofSeconds(60)); // retriggers lock expiration check in broker\n return null;\n }).until(t -> eventRecorder.hasTaskEvent(taskEvent(\"LOCK_EXPIRED\")));\n \ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\nindex 5ff1301..0ffe98d 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\n+++ 
b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerRestartTest.java\n@@ -15,7 +15,9 @@\n */\n package io.zeebe.broker.it.startup;\n \n-import static io.zeebe.broker.it.util.TopicEventRecorder.*;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.incidentEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.taskEvent;\n+import static io.zeebe.broker.it.util.TopicEventRecorder.wfInstanceEvent;\n import static io.zeebe.test.util.TestUtil.waitUntil;\n import static org.assertj.core.api.Assertions.assertThat;\n \n@@ -23,11 +25,18 @@ import java.io.File;\n import java.io.InputStream;\n import java.nio.charset.StandardCharsets;\n import java.time.Duration;\n-import java.time.Instant;\n import java.util.Collections;\n import java.util.List;\n import java.util.regex.Pattern;\n \n+import org.junit.After;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.experimental.categories.Category;\n+import org.junit.rules.ExpectedException;\n+import org.junit.rules.RuleChain;\n+import org.junit.rules.TemporaryFolder;\n+\n import io.zeebe.broker.clustering.ClusterServiceNames;\n import io.zeebe.broker.it.ClientRule;\n import io.zeebe.broker.it.EmbeddedBrokerRule;\n@@ -37,7 +46,9 @@ import io.zeebe.client.ZeebeClient;\n import io.zeebe.client.clustering.impl.TopicLeader;\n import io.zeebe.client.clustering.impl.TopologyResponse;\n import io.zeebe.client.cmd.ClientCommandRejectedException;\n-import io.zeebe.client.event.*;\n+import io.zeebe.client.event.DeploymentEvent;\n+import io.zeebe.client.event.TaskEvent;\n+import io.zeebe.client.event.WorkflowInstanceEvent;\n import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.instance.WorkflowDefinition;\n import io.zeebe.raft.Raft;\n@@ -47,9 +58,6 @@ import io.zeebe.test.util.TestFileUtil;\n import io.zeebe.test.util.TestUtil;\n import io.zeebe.transport.SocketAddress;\n import io.zeebe.util.time.ClockUtil;\n-import org.junit.*;\n-import org.junit.experimental.categories.Category;\n-import org.junit.rules.*;\n \n public class BrokerRestartTest\n {\n@@ -360,11 +368,7 @@ public class BrokerRestartTest\n waitUntil(() -> !recordingTaskHandler.getHandledTasks().isEmpty());\n \n // when\n- restartBroker(() ->\n- {\n- final Instant now = ClockUtil.getCurrentTime();\n- ClockUtil.setCurrentTime(now.plusSeconds(60));\n- });\n+ restartBroker(() -> ClockUtil.addTime(Duration.ofSeconds(60)));\n \n waitUntil(() -> eventRecorder.hasTaskEvent(taskEvent(\"LOCK_EXPIRED\")));\n recordingTaskHandler.clear();\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\nindex 49b527d..a322fbe 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/task/TaskSubscriptionTest.java\n@@ -353,7 +353,7 @@ public class TaskSubscriptionTest\n waitUntil(() -> taskHandler.getHandledTasks().size() == 1);\n \n // when\n- ClockUtil.setCurrentTime(Instant.now().plus(Duration.ofMinutes(5)));\n+ ClockUtil.addTime(Duration.ofMinutes(5));\n \n // then\n waitUntil(() -> taskHandler.getHandledTasks().size() == 2);\n", "diff --git a/ibis/backends/base/__init__.py b/ibis/backends/base/__init__.py\nindex effd44c..a59c0ec 100644\n--- a/ibis/backends/base/__init__.py\n+++ b/ibis/backends/base/__init__.py\n@@ -31,7 +31,7 @@ import ibis.common.exceptions as exc\n import ibis.config\n import ibis.expr.operations as ops\n import 
ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n \n __all__ = ('BaseBackend', 'Database', 'connect')\n \ndiff --git a/ibis/backends/base/sql/__init__.py b/ibis/backends/base/sql/__init__.py\nindex e4f2129..7bbdaf9 100644\n--- a/ibis/backends/base/sql/__init__.py\n+++ b/ibis/backends/base/sql/__init__.py\n@@ -12,7 +12,7 @@ import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import BaseBackend\n from ibis.backends.base.sql.compiler import Compiler\n \ndiff --git a/ibis/backends/base/sql/alchemy/__init__.py b/ibis/backends/base/sql/alchemy/__init__.py\nindex 71cc0e8..ab89d7d 100644\n--- a/ibis/backends/base/sql/alchemy/__init__.py\n+++ b/ibis/backends/base/sql/alchemy/__init__.py\n@@ -11,7 +11,7 @@ import ibis\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.alchemy.database import AlchemyDatabase, AlchemyTable\n from ibis.backends.base.sql.alchemy.datatypes import (\ndiff --git a/ibis/backends/base/sql/alchemy/query_builder.py b/ibis/backends/base/sql/alchemy/query_builder.py\nindex 54c74ba..0ec432f 100644\n--- a/ibis/backends/base/sql/alchemy/query_builder.py\n+++ b/ibis/backends/base/sql/alchemy/query_builder.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import functools\n \n import sqlalchemy as sa\n-import sqlalchemy.sql as sql\n+from sqlalchemy import sql\n \n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/base/sql/compiler/base.py b/ibis/backends/base/sql/compiler/base.py\nindex 84102aa..fb44667 100644\n--- a/ibis/backends/base/sql/compiler/base.py\n+++ b/ibis/backends/base/sql/compiler/base.py\n@@ -7,7 +7,7 @@ import toolz\n \n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n class DML(abc.ABC):\ndiff --git a/ibis/backends/base/sql/compiler/query_builder.py b/ibis/backends/base/sql/compiler/query_builder.py\nindex a2d5214..95f5e8d 100644\n--- a/ibis/backends/base/sql/compiler/query_builder.py\n+++ b/ibis/backends/base/sql/compiler/query_builder.py\n@@ -8,7 +8,7 @@ import toolz\n import ibis.common.exceptions as com\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.compiler.base import DML, QueryAST, SetOp\n from ibis.backends.base.sql.compiler.select_builder import SelectBuilder, _LimitSpec\n from ibis.backends.base.sql.compiler.translator import ExprTranslator, QueryContext\ndiff --git a/ibis/backends/base/sql/registry/main.py b/ibis/backends/base/sql/registry/main.py\nindex 77f70a5..586ace5 100644\n--- a/ibis/backends/base/sql/registry/main.py\n+++ b/ibis/backends/base/sql/registry/main.py\n@@ -4,7 +4,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.registry import (\n aggregate,\n binary_infix,\ndiff --git a/ibis/backends/base/sql/registry/timestamp.py b/ibis/backends/base/sql/registry/timestamp.py\nindex 412eab1..3c8571f 100644\n--- a/ibis/backends/base/sql/registry/timestamp.py\n+++ 
b/ibis/backends/base/sql/registry/timestamp.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n def extract_field(sql_attr):\ndiff --git a/ibis/backends/clickhouse/tests/test_client.py b/ibis/backends/clickhouse/tests/test_client.py\nindex 8db6672..bb1b9ba 100644\n--- a/ibis/backends/clickhouse/tests/test_client.py\n+++ b/ibis/backends/clickhouse/tests/test_client.py\n@@ -3,9 +3,9 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.backends.clickhouse.tests.conftest import (\n CLICKHOUSE_HOST,\n CLICKHOUSE_PASS,\ndiff --git a/ibis/backends/conftest.py b/ibis/backends/conftest.py\nindex 3a974da..ba7ad75 100644\n--- a/ibis/backends/conftest.py\n+++ b/ibis/backends/conftest.py\n@@ -20,7 +20,7 @@ if TYPE_CHECKING:\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import _get_backend_names\n \n TEST_TABLES = {\ndiff --git a/ibis/backends/dask/execution/util.py b/ibis/backends/dask/execution/util.py\nindex 61bff7e..7ed0c10 100644\n--- a/ibis/backends/dask/execution/util.py\n+++ b/ibis/backends/dask/execution/util.py\n@@ -9,13 +9,13 @@ import pandas as pd\n from dask.dataframe.groupby import SeriesGroupBy\n \n import ibis.backends.pandas.execution.util as pd_util\n-import ibis.common.graph as graph\n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n import ibis.util\n from ibis.backends.dask.core import execute\n from ibis.backends.pandas.trace import TraceTwoLevelDispatcher\n+from ibis.common import graph\n from ibis.expr.scope import Scope\n \n if TYPE_CHECKING:\ndiff --git a/ibis/backends/duckdb/datatypes.py b/ibis/backends/duckdb/datatypes.py\nindex fd6b8f5..52c0719 100644\n--- a/ibis/backends/duckdb/datatypes.py\n+++ b/ibis/backends/duckdb/datatypes.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import parsy as p\n import toolz\n \n-import ibis.util as util\n+from ibis import util\n from ibis.common.parsing import (\n COMMA,\n FIELD,\ndiff --git a/ibis/backends/impala/__init__.py b/ibis/backends/impala/__init__.py\nindex 4ad2057..8299a28 100644\n--- a/ibis/backends/impala/__init__.py\n+++ b/ibis/backends/impala/__init__.py\n@@ -20,7 +20,7 @@ import ibis.config\n import ibis.expr.datatypes as dt\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.ddl import (\n CTAS,\ndiff --git a/ibis/backends/impala/client.py b/ibis/backends/impala/client.py\nindex 6655ce7..78d526f 100644\n--- a/ibis/backends/impala/client.py\n+++ b/ibis/backends/impala/client.py\n@@ -10,7 +10,7 @@ import sqlalchemy as sa\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import Database\n from ibis.backends.base.sql.compiler import DDL, DML\n from ibis.backends.base.sql.ddl import (\ndiff --git a/ibis/backends/impala/pandas_interop.py b/ibis/backends/impala/pandas_interop.py\nindex f410a8b..e687884 100644\n--- a/ibis/backends/impala/pandas_interop.py\n+++ b/ibis/backends/impala/pandas_interop.py\n@@ -22,7 +22,7 @@ from 
posixpath import join as pjoin\n import ibis.backends.pandas.client # noqa: F401\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.config import options\n \n \ndiff --git a/ibis/backends/impala/tests/conftest.py b/ibis/backends/impala/tests/conftest.py\nindex 1075ebe..a815be5 100644\n--- a/ibis/backends/impala/tests/conftest.py\n+++ b/ibis/backends/impala/tests/conftest.py\n@@ -13,8 +13,7 @@ import pytest\n \n import ibis\n import ibis.expr.types as ir\n-import ibis.util as util\n-from ibis import options\n+from ibis import options, util\n from ibis.backends.base import BaseBackend\n from ibis.backends.conftest import TEST_TABLES, _random_identifier\n from ibis.backends.impala.compiler import ImpalaCompiler, ImpalaExprTranslator\ndiff --git a/ibis/backends/impala/tests/test_client.py b/ibis/backends/impala/tests/test_client.py\nindex 0b56054..3fcca3a 100644\n--- a/ibis/backends/impala/tests/test_client.py\n+++ b/ibis/backends/impala/tests/test_client.py\n@@ -7,9 +7,9 @@ import pytz\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_ddl.py b/ibis/backends/impala/tests/test_ddl.py\nindex 870c4dc..2346a3d 100644\n--- a/ibis/backends/impala/tests/test_ddl.py\n+++ b/ibis/backends/impala/tests/test_ddl.py\n@@ -6,7 +6,7 @@ import ibis\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.ddl import fully_qualified_re\n from ibis.tests.util import assert_equal\n \ndiff --git a/ibis/backends/impala/tests/test_exprs.py b/ibis/backends/impala/tests/test_exprs.py\nindex cfc8552..1d6f44f 100644\n--- a/ibis/backends/impala/tests/test_exprs.py\n+++ b/ibis/backends/impala/tests/test_exprs.py\n@@ -5,10 +5,10 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.types as ir\n from ibis import literal as L\n from ibis.backends.impala.compiler import ImpalaCompiler\n+from ibis.expr import api\n from ibis.expr.datatypes import Category\n \n \ndiff --git a/ibis/backends/impala/tests/test_partition.py b/ibis/backends/impala/tests/test_partition.py\nindex 1f96e7d..44217a4 100644\n--- a/ibis/backends/impala/tests/test_partition.py\n+++ b/ibis/backends/impala/tests/test_partition.py\n@@ -6,7 +6,7 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_udf.py b/ibis/backends/impala/tests/test_udf.py\nindex 895918b..fd950d5 100644\n--- a/ibis/backends/impala/tests/test_udf.py\n+++ b/ibis/backends/impala/tests/test_udf.py\n@@ -9,11 +9,11 @@ import ibis\n import ibis.backends.impala as api\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n-import ibis.expr.rules as rules\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.impala import ddl\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import rules\n \n pytest.importorskip(\"impala\")\n \ndiff --git a/ibis/backends/impala/udf.py b/ibis/backends/impala/udf.py\nindex 
c6f2ef6..8b8b552 100644\n--- a/ibis/backends/impala/udf.py\n+++ b/ibis/backends/impala/udf.py\n@@ -21,7 +21,7 @@ import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.udf.validate as v\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.registry import fixed_arity, sql_type_names\n from ibis.backends.impala.compiler import ImpalaExprTranslator\n \ndiff --git a/ibis/backends/mysql/__init__.py b/ibis/backends/mysql/__init__.py\nindex c0ddacb..50b331a 100644\n--- a/ibis/backends/mysql/__init__.py\n+++ b/ibis/backends/mysql/__init__.py\n@@ -8,7 +8,7 @@ import warnings\n from typing import Literal\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/mysql/compiler.py b/ibis/backends/mysql/compiler.py\nindex 13819cb..7456f71 100644\n--- a/ibis/backends/mysql/compiler.py\n+++ b/ibis/backends/mysql/compiler.py\n@@ -1,7 +1,7 @@\n from __future__ import annotations\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n from ibis.backends.base.sql.alchemy import AlchemyCompiler, AlchemyExprTranslator\ndiff --git a/ibis/backends/postgres/tests/test_functions.py b/ibis/backends/postgres/tests/test_functions.py\nindex 33c6d2e..0f377e3 100644\n--- a/ibis/backends/postgres/tests/test_functions.py\n+++ b/ibis/backends/postgres/tests/test_functions.py\n@@ -11,9 +11,9 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis import literal as L\n from ibis.expr.window import rows_with_max_lookback\n \ndiff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py\nindex 1b42080..b994911 100644\n--- a/ibis/backends/pyspark/__init__.py\n+++ b/ibis/backends/pyspark/__init__.py\n@@ -14,8 +14,7 @@ import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.expr.types as types\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.compiler import Compiler, TableSetFormatter\n from ibis.backends.base.sql.ddl import (\n@@ -217,16 +216,16 @@ class Backend(BaseSQLBackend):\n **kwargs: Any,\n ) -> Any:\n \"\"\"Execute an expression.\"\"\"\n- if isinstance(expr, types.Table):\n+ if isinstance(expr, ir.Table):\n return self.compile(expr, timecontext, params, **kwargs).toPandas()\n- elif isinstance(expr, types.Column):\n+ elif isinstance(expr, ir.Column):\n # expression must be named for the projection\n if not expr.has_name():\n expr = expr.name(\"tmp\")\n return self.compile(\n expr.to_projection(), timecontext, params, **kwargs\n ).toPandas()[expr.get_name()]\n- elif isinstance(expr, types.Scalar):\n+ elif isinstance(expr, ir.Scalar):\n compiled = self.compile(expr, timecontext, params, **kwargs)\n if isinstance(compiled, Column):\n # attach result column to a fake DataFrame and\ndiff --git a/ibis/backends/pyspark/tests/test_ddl.py b/ibis/backends/pyspark/tests/test_ddl.py\nindex 0288062..ccc8a97 100644\n--- a/ibis/backends/pyspark/tests/test_ddl.py\n+++ b/ibis/backends/pyspark/tests/test_ddl.py\n@@ -5,7 +5,7 @@ import pytest\n \n import ibis\n import ibis.common.exceptions 
as com\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pyspark = pytest.importorskip(\"pyspark\")\ndiff --git a/ibis/backends/sqlite/tests/test_client.py b/ibis/backends/sqlite/tests/test_client.py\nindex 95aa24d..ad64700 100644\n--- a/ibis/backends/sqlite/tests/test_client.py\n+++ b/ibis/backends/sqlite/tests/test_client.py\n@@ -5,8 +5,8 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.types as ir\n+from ibis import config\n \n pytest.importorskip(\"sqlalchemy\")\n \ndiff --git a/ibis/expr/format.py b/ibis/expr/format.py\nindex e3d48cd..85fab3f 100644\n--- a/ibis/expr/format.py\n+++ b/ibis/expr/format.py\n@@ -9,13 +9,13 @@ from typing import Any, Callable, Deque, Iterable, Mapping, Tuple\n import rich.pretty\n \n import ibis\n-import ibis.common.graph as graph\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n import ibis.expr.window as win\n-import ibis.util as util\n+from ibis import util\n+from ibis.common import graph\n \n Aliases = Mapping[ops.TableNode, int]\n Deps = Deque[Tuple[int, ops.TableNode]]\ndiff --git a/ibis/expr/operations/relations.py b/ibis/expr/operations/relations.py\nindex 080ddcd..de44a15 100644\n--- a/ibis/expr/operations/relations.py\n+++ b/ibis/expr/operations/relations.py\n@@ -11,7 +11,7 @@ import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute\n from ibis.expr.deferred import Deferred\n from ibis.expr.operations.core import Named, Node, Value\ndiff --git a/ibis/expr/rules.py b/ibis/expr/rules.py\nindex 9b1a3b7..d40700e 100644\n--- a/ibis/expr/rules.py\n+++ b/ibis/expr/rules.py\n@@ -11,7 +11,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute, optional\n from ibis.common.validators import (\n bool_,\ndiff --git a/ibis/expr/timecontext.py b/ibis/expr/timecontext.py\nindex 7ecd8e7..9620d6c 100644\n--- a/ibis/expr/timecontext.py\n+++ b/ibis/expr/timecontext.py\n@@ -38,8 +38,8 @@ from typing import TYPE_CHECKING, Any\n import numpy as np\n \n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.operations as ops\n+from ibis import config\n \n if TYPE_CHECKING:\n import pandas as pd\ndiff --git a/ibis/expr/types/groupby.py b/ibis/expr/types/groupby.py\nindex 138f92e..97aaaa2 100644\n--- a/ibis/expr/types/groupby.py\n+++ b/ibis/expr/types/groupby.py\n@@ -22,7 +22,7 @@ from typing import Iterable, Sequence\n import ibis.expr.analysis as an\n import ibis.expr.types as ir\n import ibis.expr.window as _window\n-import ibis.util as util\n+from ibis import util\n from ibis.expr.deferred import Deferred\n \n _function_types = tuple(\ndiff --git a/ibis/expr/window.py b/ibis/expr/window.py\nindex 5ef3bb1..3e0efdc 100644\n--- a/ibis/expr/window.py\n+++ b/ibis/expr/window.py\n@@ -11,7 +11,7 @@ import toolz\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.exceptions import IbisInputError\n from ibis.common.grounds import Comparable\n \ndiff --git 
a/ibis/tests/expr/test_decimal.py b/ibis/tests/expr/test_decimal.py\nindex 85d8eb2..12b809b 100644\n--- a/ibis/tests/expr/test_decimal.py\n+++ b/ibis/tests/expr/test_decimal.py\n@@ -3,10 +3,10 @@ import operator\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_type_metadata(lineitem):\ndiff --git a/ibis/tests/expr/test_interactive.py b/ibis/tests/expr/test_interactive.py\nindex cea1945..0c5613b 100644\n--- a/ibis/tests/expr/test_interactive.py\n+++ b/ibis/tests/expr/test_interactive.py\n@@ -14,7 +14,7 @@\n \n import pytest\n \n-import ibis.config as config\n+from ibis import config\n from ibis.tests.expr.mocks import MockBackend\n \n \ndiff --git a/ibis/tests/expr/test_table.py b/ibis/tests/expr/test_table.py\nindex 04f4a7d..3f77985 100644\n--- a/ibis/tests/expr/test_table.py\n+++ b/ibis/tests/expr/test_table.py\n@@ -10,13 +10,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as an\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n from ibis import _\n from ibis import literal as L\n from ibis.common.exceptions import RelationError\n+from ibis.expr import api\n from ibis.expr.types import Column, Table\n from ibis.tests.expr.mocks import MockAlchemyBackend, MockBackend\n from ibis.tests.util import assert_equal, assert_pickle_roundtrip\ndiff --git a/ibis/tests/expr/test_temporal.py b/ibis/tests/expr/test_temporal.py\nindex e76e71c..9a0f43f 100644\n--- a/ibis/tests/expr/test_temporal.py\n+++ b/ibis/tests/expr/test_temporal.py\n@@ -5,10 +5,10 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_temporal_literals():\ndiff --git a/ibis/tests/expr/test_timestamp.py b/ibis/tests/expr/test_timestamp.py\nindex 6601c8b..7782787 100644\n--- a/ibis/tests/expr/test_timestamp.py\n+++ b/ibis/tests/expr/test_timestamp.py\n@@ -5,11 +5,11 @@ import pandas as pd\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_field_select(alltypes):\ndiff --git a/ibis/tests/expr/test_value_exprs.py b/ibis/tests/expr/test_value_exprs.py\nindex 4c3d475..9eb247c 100644\n--- a/ibis/tests/expr/test_value_exprs.py\n+++ b/ibis/tests/expr/test_value_exprs.py\n@@ -15,13 +15,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as L\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n from ibis import _, literal\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import api\n from ibis.tests.util import assert_equal\n \n \ndiff --git a/ibis/tests/expr/test_visualize.py b/ibis/tests/expr/test_visualize.py\nindex 5525944..253564f 100644\n--- a/ibis/tests/expr/test_visualize.py\n+++ b/ibis/tests/expr/test_visualize.py\n@@ -9,8 +9,8 @@ import ibis.expr.types as ir\n \n pytest.importorskip('graphviz')\n \n-import ibis.expr.api as api # noqa: E402\n import ibis.expr.visualize as 
viz # noqa: E402\n+from ibis.expr import api # noqa: E402\n \n pytestmark = pytest.mark.skipif(\n int(os.environ.get('CONDA_BUILD', 0)) == 1, reason='CONDA_BUILD defined'\ndiff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 2ad5453..3aa8c3d 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -15,8 +15,8 @@\n import operator\n \n import pytest\n-import sqlalchemy.sql as sql\n from sqlalchemy import func as F\n+from sqlalchemy import sql\n from sqlalchemy import types as sat\n \n import ibis\ndiff --git a/ibis/tests/util.py b/ibis/tests/util.py\nindex f79d09a..025bfc7 100644\n--- a/ibis/tests/util.py\n+++ b/ibis/tests/util.py\n@@ -5,7 +5,7 @@ from __future__ import annotations\n import pickle\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n \n \n def assert_equal(left, right):\ndiff --git a/pyproject.toml b/pyproject.toml\nindex f2146d4..492ad9e 100644\n--- a/pyproject.toml\n+++ b/pyproject.toml\n@@ -310,6 +310,7 @@ select = [\n \"PGH\", # pygrep-hooks\n \"PLC\", # pylint\n \"PLE\", # pylint\n+ \"PLR\", # pylint import style\n \"PLW\", # pylint\n \"RET\", # flake8-return\n \"RUF\", # ruff-specific rules\n", "diff --git a/benchmarks/project/pom.xml b/benchmarks/project/pom.xml\nindex 62030b6..ab87dea 100644\n--- a/benchmarks/project/pom.xml\n+++ b/benchmarks/project/pom.xml\n@@ -123,11 +123,6 @@\n </plugin>\n \n <plugin>\n- <groupId>com.diffplug.spotless</groupId>\n- <artifactId>spotless-maven-plugin</artifactId>\n- </plugin>\n-\n- <plugin>\n <groupId>org.apache.maven.plugins</groupId>\n <artifactId>maven-shade-plugin</artifactId>\n <executions>\n", "diff --git a/README.md b/README.md\nindex d944d22..5099f03 100644\n--- a/README.md\n+++ b/README.md\n@@ -10,9 +10,8 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n \n <!-- toc -->\n \n-* [Motivation](#motivation)\n+* [Introduction](#introduction)\n * [Installation](#installation)\n- + [Setting up a quick project](#setting-up-a-quick-project)\n * [Usage](#usage)\n + [Creating stores](#creating-stores)\n + [Creating reactive views](#creating-reactive-views)\n@@ -35,12 +34,14 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n Easy State consists of two wrapper functions only. `store` creates state stores and `view` creates reactive components, which re-render whenever state stores are mutated. 
The rest is just plain JavaScript.\n \n ```js\n-import React, from 'react'\n+import React from 'react'\n import { store, view } from 'react-easy-state'\n \n+// stores are normal objects\n const clock = store({ time: new Date() })\n setInterval(() => clock.time = new Date(), 1000)\n \n+// reactive components re-render on store mutations\n function ClockComp () {\n return <div>{clock.time}</div>\n }\n", "diff --git a/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java b/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\nindex d8f4d89..e54e85a 100644\n--- a/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\n+++ b/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\n@@ -23,8 +23,9 @@ public class FsSnapshotStorageConfiguration\n {\n protected static final String CHECKSUM_ALGORITHM = \"SHA1\";\n \n- protected static final String SNAPSHOT_FILE_NAME_TEMPLATE = \"%s\" + File.separatorChar + \"%s-%d.snapshot\";\n- protected static final String SNAPSHOT_FILE_NAME_PATTERN = \"%s-(\\\\d+).snapshot\";\n+ protected static final String SNAPSHOT_FILE_NAME_TEMPLATE = \"%s-%d.snapshot\";\n+ protected static final String SNAPSHOT_FILE_PATH_TEMPLATE = \"%s\" + File.separatorChar + SNAPSHOT_FILE_NAME_TEMPLATE;\n+ protected static final String SNAPSHOT_FILE_NAME_PATTERN = \"%s-(\\\\d+)\\\\.snapshot\";\n \n protected static final String CHECKSUM_FILE_NAME_TEMPLATE = \"%s\" + File.separatorChar + \"%s-%d.\" + CHECKSUM_ALGORITHM.toLowerCase();\n \n@@ -50,7 +51,7 @@ public class FsSnapshotStorageConfiguration\n \n public String snapshotFileName(String name, long logPosition)\n {\n- return String.format(SNAPSHOT_FILE_NAME_TEMPLATE, rootPath, name, logPosition);\n+ return String.format(SNAPSHOT_FILE_PATH_TEMPLATE, rootPath, name, logPosition);\n }\n \n public String checksumFileName(String name, long logPosition)\n@@ -86,7 +87,7 @@ public class FsSnapshotStorageConfiguration\n return String.format(CHECKSUM_CONTENT_TEMPLATE, checksum, dataFileName);\n }\n \n- public String extractDigetsFromChecksumContent(String content)\n+ public String extractDigestFromChecksumContent(String content)\n {\n final int indexOfSeparator = content.indexOf(CHECKSUM_CONTENT_SEPARATOR);\n if (indexOfSeparator < 0)\n@@ -108,9 +109,18 @@ public class FsSnapshotStorageConfiguration\n return content.substring(indexOfSeparator + CHECKSUM_CONTENT_SEPARATOR.length());\n }\n \n+ public String getSnapshotNameFromFileName(final String fileName)\n+ {\n+ final String suffixPattern = String.format(SNAPSHOT_FILE_NAME_PATTERN, \"\");\n+ final Pattern pattern = Pattern.compile(suffixPattern);\n+ final String[] parts = pattern.split(fileName);\n+\n+ return parts[0];\n+ }\n+\n public String getSnapshotFileNameTemplate()\n {\n- return SNAPSHOT_FILE_NAME_TEMPLATE;\n+ return SNAPSHOT_FILE_PATH_TEMPLATE;\n }\n \n public String getChecksumFileNameTemplate()\n"]
5
["7ece3a9a16780dc6c633bbd903d36ce0aefd6a8a", "8d53d724275ebe4b2a0bb0bd7e2c2dfc399e049b", "7f9721dc9bbf66a3712d59352f64ca089da139f0", "b8a664c1b10f4e30a3e221a14211a3cdaf90b7f4", "7ab965c55d0e98fdb6179577d0db56599675e400"]
["test", "refactor", "build", "docs", "feat"]
rename top-level to connection,updated test to use rows for action items. references #279,Add the select function for LogicFlow,simplify loadFiles code,exclude github.io from link checking to avoid rate limiting
["diff --git a/docs/_quarto.yml b/docs/_quarto.yml\nindex 4e086c7..69471ee 100644\n--- a/docs/_quarto.yml\n+++ b/docs/_quarto.yml\n@@ -140,7 +140,7 @@ website:\n contents:\n - section: Expression API\n contents:\n- - reference/top_level.qmd\n+ - reference/connection.qmd\n - reference/expression-tables.qmd\n - reference/selectors.qmd\n - reference/expression-generic.qmd\n@@ -191,10 +191,10 @@ quartodoc:\n contents:\n - kind: page\n package: ibis\n- path: top_level\n+ path: connection\n summary:\n- name: Top-level APIs\n- desc: Methods and objects available directly on the `ibis` module.\n+ name: Connection API\n+ desc: Create and manage backend connections.\n contents:\n - name: connect\n package: ibis.backends.base\n", "diff --git a/ionic/components/card/test/advanced/main.html b/ionic/components/card/test/advanced/main.html\nindex 7c56a7d..c19ea12 100644\n--- a/ionic/components/card/test/advanced/main.html\n+++ b/ionic/components/card/test/advanced/main.html\n@@ -19,16 +19,20 @@\n </p>\n </ion-card-content>\n \n- <ion-item>\n- <button clear item-left>\n- <icon star></icon>\n- Star\n- </button>\n- <button clear item-right class=\"activated\">\n- <icon share></icon>\n- Share.activated\n- </button>\n- </ion-item>\n+ <ion-row no-padding>\n+ <ion-col>\n+ <button clear small>\n+ <icon star></icon>\n+ Star\n+ </button>\n+ </ion-col>\n+ <ion-col text-right>\n+ <button clear small class=\"activated\">\n+ <icon share></icon>\n+ Share.activated\n+ </button>\n+ </ion-col>\n+ </ion-row>\n \n </ion-card>\n \n@@ -51,19 +55,24 @@\n <p>Hello. I am a paragraph.</p>\n </ion-card-content>\n \n- <ion-item>\n- <button clear item-left danger class=\"activated\">\n- <icon star></icon>\n- Favorite.activated\n- </button>\n- <button clear item-left danger>\n- <icon musical-notes></icon>\n- Listen\n- </button>\n- <ion-note item-right>\n- Right Note\n- </ion-note>\n- </ion-item>\n+ <ion-row center no-padding>\n+ <ion-col width-75>\n+ <button clear small danger class=\"activated\">\n+ <icon star></icon>\n+ Favorite.activated\n+ </button>\n+ <button clear small danger>\n+ <icon musical-notes></icon>\n+ Listen\n+ </button>\n+ </ion-col>\n+ <ion-col text-right>\n+ <button clear small>\n+ <icon share></icon>\n+ Share\n+ </button>\n+ </ion-col>\n+ </ion-row>\n </ion-card>\n \n <ion-card>\n@@ -76,20 +85,27 @@\n This card was breaking the border radius.\n </ion-card-content>\n \n- <ion-item>\n- <button clear item-left dark>\n- <icon star></icon>\n- Favorite\n- </button>\n- <button clear item-right dark>\n- <icon musical-notes></icon>\n- Listen\n- </button>\n- <button clear item-right dark>\n- <icon share-alt></icon>\n- Share\n- </button>\n- </ion-item>\n+ <ion-row text-center no-padding>\n+ <ion-col>\n+ <button clear small dark>\n+ <icon star></icon>\n+ Favorite\n+ </button>\n+ </ion-col>\n+\n+ <ion-col>\n+ <button clear small dark>\n+ <icon musical-notes></icon>\n+ Listen\n+ </button>\n+ </ion-col>\n+ <ion-col>\n+ <button clear small dark>\n+ <icon share-alt></icon>\n+ Share\n+ </button>\n+ </ion-col>\n+ </ion-row>\n \n </ion-card>\n \n", "diff --git a/packages/core/src/LogicFlow.tsx b/packages/core/src/LogicFlow.tsx\nindex 0d913b7..dcc59b3 100644\n--- a/packages/core/src/LogicFlow.tsx\n+++ b/packages/core/src/LogicFlow.tsx\n@@ -276,6 +276,12 @@ export default class LogicFlow {\n this.translate(-TRANSLATE_X, -TRANSLATE_Y);\n }\n /**\n+ * \u5c06\u56fe\u5f62\u9009\u4e2d\n+ */\n+ select(id: string) {\n+ this.graphModel.selectElementById(id);\n+ }\n+ /**\n * \u5c06\u56fe\u5f62\u5b9a\u4f4d\u5230\u753b\u5e03\u4e2d\u5fc3\n * 
@param focusOnArgs \u652f\u6301\u7528\u6237\u4f20\u5165\u56fe\u5f62\u5f53\u524d\u7684\u5750\u6807\u6216id\uff0c\u53ef\u4ee5\u901a\u8fc7type\u6765\u533a\u5206\u662f\u8282\u70b9\u8fd8\u662f\u8fde\u7ebf\u7684id\uff0c\u4e5f\u53ef\u4ee5\u4e0d\u4f20\uff08\u515c\u5e95\uff09\n */\ndiff --git a/packages/core/src/model/GraphModel.ts b/packages/core/src/model/GraphModel.ts\nindex 94d0899..10280a9 100644\n--- a/packages/core/src/model/GraphModel.ts\n+++ b/packages/core/src/model/GraphModel.ts\n@@ -481,6 +481,13 @@ class GraphModel {\n this.selectElement?.setSelected(true);\n }\n \n+ @action\n+ selectElementById(id: string) {\n+ this.selectElement?.setSelected(false);\n+ this.selectElement = this.getElement(id) as BaseNodeModel | BaseEdgeModel;\n+ this.selectElement?.setSelected(true);\n+ }\n+\n /* \u4fee\u6539\u8fde\u7ebf\u7c7b\u578b */\n @action\n changeEdgeType(type: string): void {\n", "diff --git a/frontend/app/player/web/network/loadFiles.ts b/frontend/app/player/web/network/loadFiles.ts\nindex ec174fc..d164333 100644\n--- a/frontend/app/player/web/network/loadFiles.ts\n+++ b/frontend/app/player/web/network/loadFiles.ts\n@@ -1,43 +1,33 @@\n import APIClient from 'App/api_client';\n \n-const NO_NTH_FILE = \"nnf\"\n-const NO_UNPROCESSED_FILES = \"nuf\"\n+const NO_FILE_OK = \"No-file-but-this-is-ok\"\n+const NO_BACKUP_FILE = \"No-efs-file\"\n \n export const loadFiles = (\n urls: string[],\n onData: (data: Uint8Array) => void,\n ): Promise<void> => {\n- const firstFileURL = urls[0]\n- urls = urls.slice(1)\n- if (!firstFileURL) {\n+ if (!urls.length) {\n return Promise.reject(\"No urls provided\")\n }\n- return window.fetch(firstFileURL)\n- .then(r => {\n- return processAPIStreamResponse(r, true)\n- })\n- .then(onData)\n- .then(() =>\n- urls.reduce((p, url) =>\n- p.then(() =>\n- window.fetch(url)\n- .then(r => {\n- return processAPIStreamResponse(r, false)\n- })\n- .then(onData)\n- ),\n- Promise.resolve(),\n- )\n+ return urls.reduce((p, url, index) =>\n+ p.then(() =>\n+ window.fetch(url)\n+ .then(r => {\n+ return processAPIStreamResponse(r, index===0)\n+ })\n+ .then(onData)\n+ ),\n+ Promise.resolve(),\n )\n .catch(e => {\n- if (e === NO_NTH_FILE) {\n+ if (e === NO_FILE_OK) {\n return\n }\n throw e\n })\n }\n \n-\n export async function requestEFSDom(sessionId: string) {\n return await requestEFSMobFile(sessionId + \"/dom.mob\")\n }\n@@ -50,21 +40,18 @@ async function requestEFSMobFile(filename: string) {\n const api = new APIClient()\n const res = await api.fetch('/unprocessed/' + filename)\n if (res.status >= 400) {\n- throw NO_UNPROCESSED_FILES\n+ throw NO_BACKUP_FILE\n }\n return await processAPIStreamResponse(res, false)\n }\n \n-const processAPIStreamResponse = (response: Response, isFirstFile: boolean) => {\n+const processAPIStreamResponse = (response: Response, canBeMissed: boolean) => {\n return new Promise<ArrayBuffer>((res, rej) => {\n- if (response.status === 404 && !isFirstFile) {\n- return rej(NO_NTH_FILE)\n+ if (response.status === 404 && canBeMissed) {\n+ return rej(NO_FILE_OK)\n }\n if (response.status >= 400) {\n- return rej(\n- isFirstFile ? `no start file. status code ${ response.status }`\n- : `Bad endfile status code ${response.status}`\n- )\n+ return rej(`Bad file status code ${response.status}. 
Url: ${response.url}`)\n }\n res(response.arrayBuffer())\n }).then(buffer => new Uint8Array(buffer))\n", "diff --git a/.github/workflows/ibis-docs-lint.yml b/.github/workflows/ibis-docs-lint.yml\nindex 90c5a27..db6457b 100644\n--- a/.github/workflows/ibis-docs-lint.yml\n+++ b/.github/workflows/ibis-docs-lint.yml\n@@ -101,6 +101,7 @@ jobs:\n --exclude-mail \\\n --exclude fonts.gstatic.com \\\n --exclude github.com \\\n+ --exclude github.io \\\n --no-progress \\\n --github-token ${{ steps.generate_token.outputs.token }}\n \n"]
5
["9b9cd037645ec716a45b70137f8d2f01ec9ab90c", "19feaea1885eb015759b5c7a5d785521f2b8a212", "6ae067153cd2608018fd3da76bd6d00a08da4b3a", "983fef55ef08ca2ca25349bb2d5bdff10ecf89f4", "ce0539a32b927a3559feebf8f5307e3863e992a1"]
["docs", "test", "feat", "refactor", "ci"]
move toolbar to tab content level Signed-off-by: Pranav C <[email protected]>,update get-started,pin version of actionlint used,init environ cache,verify the replay mode * write a test to verify the different replay modes
["diff --git a/packages/nc-gui-v2/components.d.ts b/packages/nc-gui-v2/components.d.ts\nindex f6be04b..cf555ef 100644\n--- a/packages/nc-gui-v2/components.d.ts\n+++ b/packages/nc-gui-v2/components.d.ts\n@@ -201,6 +201,7 @@ declare module '@vue/runtime-core' {\n MdiThumbUp: typeof import('~icons/mdi/thumb-up')['default']\n MdiTrashCan: typeof import('~icons/mdi/trash-can')['default']\n MdiTwitter: typeof import('~icons/mdi/twitter')['default']\n+ MdiUpload: typeof import('~icons/mdi/upload')['default']\n MdiUploadOutline: typeof import('~icons/mdi/upload-outline')['default']\n MdiViewListOutline: typeof import('~icons/mdi/view-list-outline')['default']\n MdiWhatsapp: typeof import('~icons/mdi/whatsapp')['default']\ndiff --git a/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue b/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue\nindex c2c87d3..27c0acc 100644\n--- a/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue\n+++ b/packages/nc-gui-v2/components/smartsheet-toolbar/ViewActions.vue\n@@ -132,7 +132,7 @@ async function changeLockType(type: LockType) {\n <div>\n <a-dropdown>\n <a-button v-t=\"['c:actions']\" class=\"nc-actions-menu-btn nc-toolbar-btn\">\n- <div class=\"flex gap-2 align-center\">\n+ <div class=\"flex gap-2 items-center\">\n <component\n :is=\"viewIcons[selectedView?.type].icon\"\n class=\"nc-view-icon group-hover:hidden\"\n@@ -311,6 +311,6 @@ async function changeLockType(type: LockType) {\n \n <style scoped>\n .nc-locked-menu-item > div {\n- @apply grid grid-cols-[30px,auto] gap-2 p-2 align-center;\n+ @apply grid grid-cols-[30px,auto] gap-2 p-2 items-center;\n }\n </style>\ndiff --git a/packages/nc-gui-v2/components/smartsheet/Toolbar.vue b/packages/nc-gui-v2/components/smartsheet/Toolbar.vue\nindex 5fa555f..d498871 100644\n--- a/packages/nc-gui-v2/components/smartsheet/Toolbar.vue\n+++ b/packages/nc-gui-v2/components/smartsheet/Toolbar.vue\n@@ -36,7 +36,7 @@ const {isOpen} =useSidebar()\n \n <SmartsheetToolbarSearchData v-if=\"(isGrid || isGallery) && !isPublic\" class=\"shrink mr-2 ml-2\" />\n \n- <ToggleDrawer v-if=\"!isOpen\"/>\n+ <ToggleDrawer class=\"mr-2\"/>\n \n \n </div>\ndiff --git a/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue b/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue\nindex 896ad62..77aee05 100644\n--- a/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue\n+++ b/packages/nc-gui-v2/components/smartsheet/sidebar/index.vue\n@@ -99,6 +99,7 @@ function onCreate(view: GridType | FormType | KanbanType | GalleryType) {\n class=\"relative shadow-md h-full\"\n theme=\"light\"\n >\n+ <!--\n <Toolbar\n v-if=\"isOpen\"\n class=\"min-h-[var(--toolbar-height)] max-h-[var(--toolbar-height)]\"\n@@ -128,7 +129,7 @@ function onCreate(view: GridType | FormType | KanbanType | GalleryType) {\n <div v-if=\"!isForm\" class=\"dot\" />\n </template>\n </Toolbar>\n-\n+-->\n <div v-if=\"isOpen\" class=\"flex-1 flex flex-col\">\n <MenuTop @open-modal=\"openModal\" @deleted=\"loadViews\" @sorted=\"loadViews\" />\n \ndiff --git a/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue b/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue\nindex 3e3d78a..8441450 100644\n--- a/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue\n+++ b/packages/nc-gui-v2/components/smartsheet/sidebar/toolbar/ToggleDrawer.vue\n@@ -4,7 +4,7 @@ const { isOpen, toggle } = useSidebar({ storageKey: 'nc-right-sidebar' })\n </script>\n \n <template>\n- <a-tooltip 
:placement=\"isOpen ? 'bottomRight' : 'left'\" :mouse-enter-delay=\"0.8\">\n+<!-- <a-tooltip :placement=\"isOpen ? 'bottomRight' : 'left'\" :mouse-enter-delay=\"0.8\">\n <template #title> Toggle sidebar</template>\n \n <div class=\"nc-sidebar-right-item hover:after:(bg-primary bg-opacity-75) group nc-sidebar-add-row\">\n@@ -14,5 +14,11 @@ const { isOpen, toggle } = useSidebar({ storageKey: 'nc-right-sidebar' })\n @click=\"toggle(!isOpen)\"\n />\n </div>\n- </a-tooltip>\n+ </a-tooltip>-->\n+\n+ <a-button @click=\"toggle(!isOpen)\" size=\"small\">\n+ <div class=\"flex items-center gap-2\"> <MdiMenu/> Views\n+ </div>\n+ </a-button>\n+\n </template>\ndiff --git a/packages/nc-gui-v2/components/tabs/Smartsheet.vue b/packages/nc-gui-v2/components/tabs/Smartsheet.vue\nindex 4181996..7b7ec36 100644\n--- a/packages/nc-gui-v2/components/tabs/Smartsheet.vue\n+++ b/packages/nc-gui-v2/components/tabs/Smartsheet.vue\n@@ -83,11 +83,11 @@ watch(isLocked, (nextValue) => (treeViewIsLockedInj.value = nextValue), { immedi\n \n <SmartsheetForm v-else-if=\"isForm\" />\n </div>\n+ <SmartsheetSidebar class=\"nc-right-sidebar\" v-if=\"meta\" />\n </div>\n </template>\n </div>\n \n- <SmartsheetSidebar class=\"nc-right-sidebar\" v-if=\"meta\" />\n </div>\n </template>\n \n", "diff --git a/docs/src/go-client/get-started.md b/docs/src/go-client/get-started.md\nindex 4f4405f..a792e0e 100755\n--- a/docs/src/go-client/get-started.md\n+++ b/docs/src/go-client/get-started.md\n@@ -199,14 +199,12 @@ workflowKey:1 bpmnProcessId:\"order-process\" version:1 workflowInstanceKey:6\n \n You did it! You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n \n ## Work on a task\n@@ -322,7 +320,7 @@ it encounters a problem while processing the job.\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/go-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n When you run the above example you should see similar output:\n \ndiff --git a/docs/src/go-client/java-get-started-monitor-1.gif b/docs/src/go-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/go-client/java-get-started-monitor-2.gif b/docs/src/go-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/go-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/go-client/zeebe-monitor-1.png b/docs/src/go-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-2.png b/docs/src/go-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 
6687bb0..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/go-client/zeebe-monitor-3.png b/docs/src/go-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/go-client/zeebe-monitor-3.png and /dev/null differ\ndiff --git a/docs/src/introduction/quickstart.md b/docs/src/introduction/quickstart.md\nindex 70abacf..68be28b 100644\n--- a/docs/src/introduction/quickstart.md\n+++ b/docs/src/introduction/quickstart.md\n@@ -215,7 +215,7 @@ and completed by a [job worker](/basics/job-workers.html). A job worker is a\n long living process which repeatedly tries to activate jobs for a given job\n type and completes them after executing its business logic. The `zbctl` also\n provides a command to spawn simple job workers using an external command or\n-script. The job worker will receive for every job the payload as JSON object on\n+script. The job worker will receive for every job the workflow instance variables as JSON object on\n `stdin` and has to return its result also as JSON object on `stdout` if it\n handled the job successfully.\n \ndiff --git a/docs/src/java-client/get-started.md b/docs/src/java-client/get-started.md\nindex 54d2208..afc1fd4 100755\n--- a/docs/src/java-client/get-started.md\n+++ b/docs/src/java-client/get-started.md\n@@ -21,9 +21,9 @@ You will be guided through the following steps:\n * [Zeebe Modeler](https://github.com/zeebe-io/zeebe-modeler/releases)\n * [Zeebe Monitor](https://github.com/zeebe-io/zeebe-simple-monitor/releases)\n \n-Before you begin to setup your project please start the broker, i.e. by running the start up script \n-`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the \n-address `localhost:26500`, which is used as contact point in this guide. In case your broker is \n+Before you begin to setup your project please start the broker, i.e. by running the start up script\n+`bin/broker` or `bin/broker.bat` in the distribution. Per default the broker is binding to the\n+address `localhost:26500`, which is used as contact point in this guide. In case your broker is\n available under another address please adjust the broker contact point when building the client.\n \n ## Set up a project\n@@ -182,14 +182,12 @@ Workflow instance created. Key: 6\n \n You did it! 
You want to see how the workflow instance is executed?\n \n-Start the Zeebe Monitor using `java -jar zeebe-simple-monitor.jar`.\n+Start the Zeebe Monitor using `java -jar zeebe-simple-monitor-app-*.jar`.\n \n Open a web browser and go to <http://localhost:8080/>.\n \n-Connect to the broker and switch to the workflow instances view.\n-Here, you see the current state of the workflow instance which includes active jobs, completed activities, the payload and open incidents.\n-\n-![zeebe-monitor-step-1](/java-client/zeebe-monitor-1.png)\n+Here, you see the current state of the workflow instance.\n+![zeebe-monitor-step-1](/java-client/java-get-started-monitor-1.gif)\n \n ## Work on a job\n \n@@ -205,12 +203,9 @@ Insert a few service tasks between the start and the end event.\n You need to set the type of each task, which identifies the nature of the work to be performed.\n Set the type of the first task to 'payment-service'.\n \n-Optionally, you can define parameters of the task by adding headers.\n-Add the header `method = VISA` to the first task.\n-\n Save the BPMN diagram and switch back to the main class.\n \n-Add the following lines to create a [job worker][] for the first jobs type:\n+Add the following lines to create a job worker for the first jobs type:\n \n ```java\n package io.zeebe;\n@@ -227,10 +222,7 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Collect money\");\n \n // ...\n \n@@ -252,40 +244,29 @@ public class Application\n Run the program and verify that the job is processed. You should see the output:\n \n ```\n-Collect money using payment method: VISA\n+Collect money\n ```\n \n When you have a look at the Zeebe Monitor, then you can see that the workflow instance moved from the first service task to the next one:\n \n-![zeebe-monitor-step-2](/java-client/zeebe-monitor-2.png)\n+![zeebe-monitor-step-2](/java-client/java-get-started-monitor-2.gif)\n \n ## Work with data\n \n-Usually, a workflow is more than just tasks, there is also data flow.\n-The tasks need data as input and in order to produce data.\n+Usually, a workflow is more than just tasks, there is also a data flow. The worker gets the data from the workflow instance to do its work and send the result back to the workflow instance.\n \n-In Zeebe, the data is represented as a JSON document.\n-When you create a workflow instance, then you can pass the data as payload.\n-Within the workflow, you can use input and output mappings on tasks to control the data flow.\n+In Zeebe, the data is stored as key-value-pairs in form of variables. Variables can be set when the workflow instance is created. 
Within the workflow, variables can be read and modified by workers.\n \n-In our example, we want to create a workflow instance with the following data:\n+In our example, we want to create a workflow instance with the following variables:\n \n ```json\n-{\n- \"orderId\": 31243,\n- \"orderItems\": [435, 182, 376]\n-}\n+\"orderId\": 31243\n+\"orderItems\": [435, 182, 376]\n ```\n \n-The first task should take `orderId` as input and return `totalPrice` as result.\n-\n-Open the BPMN diagram and switch to the input-output-mappings of the first task.\n-Add the input mapping `$.orderId : $.orderId` and the output mapping `$.totalPrice : $.totalPrice`.\n+The first task should read `orderId` as input and return `totalPrice` as result.\n \n-Save the BPMN diagram and go back to the main class.\n-\n-Modify the create command and pass the data as variables.\n-Also, modify the job worker to read the jobs payload and complete the job with payload.\n+Modify the workflow instance create command and pass the data as variables. Also, modify the job worker to read the job variables and complete the job with a result.\n \n ```java\n package io.zeebe;\n@@ -313,23 +294,22 @@ public class Application\n .jobType(\"payment-service\")\n .handler((jobClient, job) ->\n {\n- final Map<String, Object> headers = job.getCustomHeaders();\n- final String method = (String) headers.get(\"method\");\n-\n- final Map<String, Object> payload = job.getPayloadAsMap();\n+ final Map<String, Object> variables = job.getVariablesAsMap();\n \n- System.out.println(\"Process order: \" + payload.get(\"orderId\"));\n- System.out.println(\"Collect money using payment method: \" + method);\n+ System.out.println(\"Process order: \" + variables.get(\"orderId\"));\n+ System.out.println(\"Collect money\");\n \n // ...\n \n- payload.put(\"totalPrice\", 46.50);\n+ final Map<String, Object> result = new HashMap<>();\n+ result.put(\"totalPrice\", 46.50);\n \n jobClient.newCompleteCommand(job.getKey())\n- .payload(payload)\n+ .variables(result)\n .send()\n .join();\n })\n+ .fetchVariables(\"orderId\")\n .open();\n \n // ...\n@@ -337,16 +317,16 @@ public class Application\n }\n ```\n \n-Run the program and verify that the payload is mapped into the job. You should see the output:\n+Run the program and verify that the variable is read. 
You should see the output:\n \n ```\n-Process order: {\"orderId\":31243}\n-Collect money using payment method: VISA\n+Process order: 31243\n+Collect money\n ```\n \n-When we have a look at the Zeebe Monitor, then we can see how the payload is modified after the activity:\n+When we have a look at the Zeebe Monitor, then we can see that the variable `totalPrice` is set:\n \n-![zeebe-monitor-step-3](/java-client/zeebe-monitor-3.png)\n+![zeebe-monitor-step-3](/java-client/java-get-started-monitor-3.gif)\n \n ## What's next?\n \ndiff --git a/docs/src/java-client/java-get-started-monitor-1.gif b/docs/src/java-client/java-get-started-monitor-1.gif\nnew file mode 100644\nindex 0000000..b86803a\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-1.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-2.gif b/docs/src/java-client/java-get-started-monitor-2.gif\nnew file mode 100644\nindex 0000000..8f0f2a4\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-2.gif differ\ndiff --git a/docs/src/java-client/java-get-started-monitor-3.gif b/docs/src/java-client/java-get-started-monitor-3.gif\nnew file mode 100644\nindex 0000000..1f6cb56\nBinary files /dev/null and b/docs/src/java-client/java-get-started-monitor-3.gif differ\ndiff --git a/docs/src/java-client/zeebe-monitor-1.png b/docs/src/java-client/zeebe-monitor-1.png\ndeleted file mode 100644\nindex 0075f3d..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-1.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-2.png b/docs/src/java-client/zeebe-monitor-2.png\ndeleted file mode 100644\nindex 6687bb0..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-2.png and /dev/null differ\ndiff --git a/docs/src/java-client/zeebe-monitor-3.png b/docs/src/java-client/zeebe-monitor-3.png\ndeleted file mode 100644\nindex bc15659..0000000\nBinary files a/docs/src/java-client/zeebe-monitor-3.png and /dev/null differ\n", "diff --git a/.github/workflows/introspect.yml b/.github/workflows/introspect.yml\nindex b6d9125..82d22a5 100644\n--- a/.github/workflows/introspect.yml\n+++ b/.github/workflows/introspect.yml\n@@ -25,5 +25,5 @@ jobs:\n # From https://github.com/rhysd/actionlint/blob/main/docs/usage.md#use-actionlint-on-github-actions\n - name: Check workflow files\n run: |\n- bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/590d3bd9dde0c91f7a66071d40eb84716526e5a6/scripts/download-actionlint.bash)\n+ bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/590d3bd9dde0c91f7a66071d40eb84716526e5a6/scripts/download-actionlint.bash) 1.6.25\n ./actionlint -color -shellcheck=\"\"\n", "diff --git a/src/environment.go b/src/environment.go\nindex ae5e26a..0c961c5 100644\n--- a/src/environment.go\n+++ b/src/environment.go\n@@ -229,6 +229,7 @@ func (env *environment) environ() map[string]string {\n \tif env.environCache != nil {\n \t\treturn env.environCache\n \t}\n+\tenv.environCache = make(map[string]string)\n \tconst separator = \"=\"\n \tvalues := os.Environ()\n \tfor value := range values {\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java\nindex 167444c..7494014 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/ContinuouslyReplayTest.java\n@@ 
-11,6 +11,9 @@ import io.camunda.zeebe.engine.state.ZbColumnFamilies;\n import io.camunda.zeebe.engine.util.EngineRule;\n import io.camunda.zeebe.engine.util.ListLogStorage;\n import io.camunda.zeebe.model.bpmn.Bpmn;\n+import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n+import io.camunda.zeebe.protocol.record.value.BpmnElementType;\n+import io.camunda.zeebe.test.util.record.RecordingExporter;\n import org.assertj.core.api.SoftAssertions;\n import org.awaitility.Awaitility;\n import org.junit.Rule;\n@@ -27,16 +30,22 @@ public class ContinuouslyReplayTest {\n @Rule public final EngineRule processing = EngineRule.withSharedStorage(sharedStorage);\n \n @Test\n- public void shouldEndUpWithTheSameState() {\n+ public void shouldBuildTheSameStateOnProcessingAndReplay() {\n // given\n-\n- // when\n processing\n .deployment()\n- .withXmlResource(Bpmn.createExecutableProcess().startEvent().endEvent().done())\n+ .withXmlResource(Bpmn.createExecutableProcess(\"process\").startEvent().endEvent().done())\n .deploy();\n \n+ // when\n+ final var processInstanceKey = processing.processInstance().ofBpmnProcessId(\"process\").create();\n+\n // then\n+ RecordingExporter.processInstanceRecords(ProcessInstanceIntent.ELEMENT_COMPLETED)\n+ .withProcessInstanceKey(processInstanceKey)\n+ .withElementType(BpmnElementType.PROCESS)\n+ .await();\n+\n assertStates();\n }\n \ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\nnew file mode 100644\nindex 0000000..9dd9f4c\n--- /dev/null\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\n@@ -0,0 +1,121 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. 
You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.engine.processing.streamprocessor;\n+\n+import static io.camunda.zeebe.engine.util.RecordToWrite.command;\n+import static io.camunda.zeebe.engine.util.RecordToWrite.event;\n+import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ACTIVATE_ELEMENT;\n+import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ELEMENT_ACTIVATING;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.mockito.ArgumentMatchers.any;\n+import static org.mockito.ArgumentMatchers.anyLong;\n+import static org.mockito.ArgumentMatchers.eq;\n+import static org.mockito.Mockito.inOrder;\n+import static org.mockito.Mockito.never;\n+import static org.mockito.Mockito.timeout;\n+\n+import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor.Phase;\n+import io.camunda.zeebe.engine.state.EventApplier;\n+import io.camunda.zeebe.engine.util.Records;\n+import io.camunda.zeebe.engine.util.StreamProcessorRule;\n+import io.camunda.zeebe.protocol.impl.record.value.processinstance.ProcessInstanceRecord;\n+import io.camunda.zeebe.protocol.record.ValueType;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.mockito.InOrder;\n+import org.mockito.Mock;\n+import org.mockito.junit.MockitoJUnit;\n+import org.mockito.junit.MockitoRule;\n+import org.mockito.verification.VerificationWithTimeout;\n+\n+public final class StreamProcessorReplayModeTest {\n+\n+ private static final long TIMEOUT_MILLIS = 2_000L;\n+ private static final VerificationWithTimeout TIMEOUT = timeout(TIMEOUT_MILLIS);\n+\n+ private static final int PARTITION_ID = 1;\n+\n+ private static final ProcessInstanceRecord RECORD = Records.processInstance(1);\n+\n+ @Rule\n+ public final StreamProcessorRule replayUntilEnd =\n+ new StreamProcessorRule(PARTITION_ID).withReplayMode(ReplayMode.UNTIL_END);\n+\n+ @Rule\n+ public final StreamProcessorRule replayContinuously =\n+ new StreamProcessorRule(PARTITION_ID).withReplayMode(ReplayMode.CONTINUOUSLY);\n+\n+ @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();\n+\n+ @Mock private TypedRecordProcessor<?> typedRecordProcessor;\n+ @Mock private EventApplier eventApplier;\n+\n+ @Test\n+ public void shouldReplayUntilEnd() {\n+ // given\n+ replayUntilEnd.writeBatch(\n+ command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // when\n+ startStreamProcessor(replayUntilEnd);\n+\n+ replayUntilEnd.writeBatch(\n+ command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // then\n+ final InOrder inOrder = inOrder(typedRecordProcessor, eventApplier);\n+ inOrder.verify(eventApplier, TIMEOUT).applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n+ inOrder.verify(typedRecordProcessor, TIMEOUT.times(1)).onRecovered(any());\n+ inOrder\n+ .verify(typedRecordProcessor, TIMEOUT)\n+ .processRecord(anyLong(), any(), any(), any(), any());\n+ inOrder.verifyNoMoreInteractions();\n+\n+ assertThat(getCurrentPhase(replayUntilEnd)).isEqualTo(Phase.PROCESSING);\n+ }\n+\n+ @Test\n+ public void shouldReplayContinuously() {\n+ // given\n+ replayContinuously.writeBatch(\n+ command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // when\n+ startStreamProcessor(replayContinuously);\n+\n+ replayContinuously.writeBatch(\n+ 
command().processInstance(ACTIVATE_ELEMENT, RECORD),\n+ event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n+\n+ // then\n+ final InOrder inOrder = inOrder(typedRecordProcessor, eventApplier);\n+ inOrder\n+ .verify(eventApplier, TIMEOUT.times(2))\n+ .applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n+ inOrder.verify(typedRecordProcessor, never()).onRecovered(any());\n+ inOrder.verifyNoMoreInteractions();\n+\n+ assertThat(getCurrentPhase(replayContinuously)).isEqualTo(Phase.REPROCESSING);\n+ }\n+\n+ private void startStreamProcessor(final StreamProcessorRule streamProcessorRule) {\n+ streamProcessorRule\n+ .withEventApplierFactory(zeebeState -> eventApplier)\n+ .startTypedStreamProcessor(\n+ (processors, context) ->\n+ processors.onCommand(\n+ ValueType.PROCESS_INSTANCE, ACTIVATE_ELEMENT, typedRecordProcessor));\n+ }\n+\n+ private Phase getCurrentPhase(final StreamProcessorRule streamProcessorRule) {\n+ return streamProcessorRule.getStreamProcessor(PARTITION_ID).getCurrentPhase().join();\n+ }\n+}\ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java b/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java\nindex f626ed4..cf07b5c 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/util/EngineRule.java\n@@ -101,7 +101,6 @@ public final class EngineRule extends ExternalResource {\n new Int2ObjectHashMap<>();\n \n private long lastProcessedPosition = -1L;\n- private ReplayMode replayMode;\n \n private EngineRule(final int partitionCount) {\n this(partitionCount, null);\n@@ -176,7 +175,7 @@ public final class EngineRule extends ExternalResource {\n }\n \n public EngineRule withReplayMode(final ReplayMode replayMode) {\n- this.replayMode = replayMode;\n+ environmentRule.withReplayMode(replayMode);\n return this;\n }\n \n@@ -194,7 +193,6 @@ public final class EngineRule extends ExternalResource {\n (processingContext) ->\n EngineProcessors.createEngineProcessors(\n processingContext\n- .replayMode(replayMode)\n .onProcessedListener(\n record -> {\n lastProcessedPosition = record.getPosition();\ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java b/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java\nindex ab44773..1f9fe26 100755\n--- a/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/util/StreamProcessorRule.java\n@@ -10,6 +10,7 @@ package io.camunda.zeebe.engine.util;\n import static io.camunda.zeebe.engine.util.StreamProcessingComposite.getLogName;\n \n import io.camunda.zeebe.db.ZeebeDbFactory;\n+import io.camunda.zeebe.engine.processing.streamprocessor.ReplayMode;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecord;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecordProcessorFactory;\n@@ -64,6 +65,7 @@ public final class StreamProcessorRule implements TestRule {\n private TestStreams streams;\n private StreamProcessingComposite streamProcessingComposite;\n private ListLogStorage sharedStorage = null;\n+ private ReplayMode replayMode = ReplayMode.UNTIL_END;\n \n public StreamProcessorRule() {\n this(new TemporaryFolder());\n@@ -125,6 +127,11 @@ public final class StreamProcessorRule implements TestRule {\n return this;\n }\n \n+ public StreamProcessorRule withReplayMode(final ReplayMode replayMode) {\n+ 
this.replayMode = replayMode;\n+ return this;\n+ }\n+\n public LogStreamRecordWriter getLogStreamRecordWriter(final int partitionId) {\n return streamProcessingComposite.getLogStreamRecordWriter(partitionId);\n }\n@@ -317,6 +324,7 @@ public final class StreamProcessorRule implements TestRule {\n @Override\n protected void before() {\n streams = new TestStreams(tempFolder, closeables, actorSchedulerRule.get());\n+ streams.withReplayMode(replayMode);\n \n int partitionId = startPartitionId;\n for (int i = 0; i < partitionCount; i++) {\ndiff --git a/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java b/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java\nindex 18696b2..176c405 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/util/TestStreams.java\n@@ -17,6 +17,7 @@ import static org.mockito.Mockito.when;\n \n import io.camunda.zeebe.db.ZeebeDb;\n import io.camunda.zeebe.db.ZeebeDbFactory;\n+import io.camunda.zeebe.engine.processing.streamprocessor.ReplayMode;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedEventRegistry;\n import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecord;\n@@ -79,6 +80,7 @@ public final class TestStreams {\n private boolean snapshotWasTaken = false;\n \n private Function<MutableZeebeState, EventApplier> eventApplierFactory = EventAppliers::new;\n+ private ReplayMode replayMode = ReplayMode.UNTIL_END;\n \n public TestStreams(\n final TemporaryFolder dataDirectory,\n@@ -107,6 +109,10 @@ public final class TestStreams {\n this.eventApplierFactory = eventApplierFactory;\n }\n \n+ public void withReplayMode(final ReplayMode replayMode) {\n+ this.replayMode = replayMode;\n+ }\n+\n public CommandResponseWriter getMockedResponseWriter() {\n return mockCommandResponseWriter;\n }\n@@ -252,6 +258,7 @@ public final class TestStreams {\n .onProcessedListener(mockOnProcessedListener)\n .streamProcessorFactory(factory)\n .eventApplierFactory(eventApplierFactory)\n+ .replayMode(replayMode)\n .build();\n final var openFuture = streamProcessor.openAsync(false);\n \n"]
5
["bf95d5d0b34d32ef2684488feb3de01cb824b2b4", "cf6d526123abab2689b24a06aaf03d8e4d6ddff4", "b702adc245f679ae20d84de39f0d63b14aabed5d", "dc50bd35462a49058c91a939fc8830ae7a9eb692", "48d5d573886e9fdd0cca1cea47112c4a2f6edf52"]
["refactor", "docs", "ci", "fix", "test"]
convert to record,fix typos (#90),fix test Write another record so the commit position is updated and we can take a snapshot,fix scroll behavior in navigation,Use arm64v8 postfix for Cube Store :dev build
["diff --git a/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java b/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\nindex cc998c6..65c8550 100755\n--- a/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\n@@ -167,13 +167,8 @@ public final class ExporterDirectorDistributionTest {\n * <p>This makes sure that even if we miss one export position event, we distribute the event\n * later again, which makes tests less flaky.\n */\n- private static final class ClockShifter implements ConditionEvaluationListener<Void> {\n-\n- private final ControlledActorClock clock;\n-\n- public ClockShifter(final ControlledActorClock clock) {\n- this.clock = clock;\n- }\n+ private record ClockShifter(ControlledActorClock clock)\n+ implements ConditionEvaluationListener<Void> {\n \n @Override\n public void conditionEvaluated(final EvaluatedCondition<Void> condition) {\n", "diff --git a/README.md b/README.md\nindex de15ac5..5ad8b47 100755\n--- a/README.md\n+++ b/README.md\n@@ -16,13 +16,13 @@ content that will be loaded, similar to Facebook cards loaders.\n \n ## Features\n \n-* :gear: **Complety customizable:** you can change the colors, speed and sizes;\n+* :gear: **Completely customizable:** you can change the colors, speed and sizes;\n * :pencil2: **Create your own loading:** use the\n [create-react-content-loader](https://danilowoz.github.io/create-react-content-loader/) to create\n- your customs loadings easily;\n+ your custom loadings easily;\n * :ok_hand: **You can use right now:** there are a lot of presets to use the loader, see the\n [options](#options);\n-* :rocket: **Perfomance:** react-content-loader uses pure SVG to work, so it's works without any extra scritpt,\n+* :rocket: **Performance:** react-content-loader uses pure SVG to work, so it works without any extra scripts,\n canvas, etc;\n \n ## Usage\n", "diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\nindex 24f1316..881c727 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n@@ -70,6 +70,14 @@ public class ReaderCloseTest {\n .getCluster()\n .getNodeId();\n clusteringRule.forceClusterToHaveNewLeader(followerId);\n+ // because of https://github.com/camunda-cloud/zeebe/issues/8329\n+ // we need to add another record so we can do a snapshot\n+ clientRule\n+ .getClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"test\")\n+ .correlationKey(\"test\")\n+ .send();\n \n // when\n clusteringRule.triggerAndWaitForSnapshots();\n@@ -78,6 +86,7 @@ public class ReaderCloseTest {\n for (final Broker broker : clusteringRule.getBrokers()) {\n assertThatFilesOfDeletedSegmentsDoesNotExist(broker);\n }\n+ assertThat(leaderId).isNotEqualTo(clusteringRule.getLeaderForPartition(1).getNodeId());\n }\n \n private void assertThatFilesOfDeletedSegmentsDoesNotExist(final Broker leader)\n", "diff --git a/website/layouts/Base.tsx b/website/layouts/Base.tsx\nindex 5959fd2..08d5674 100644\n--- a/website/layouts/Base.tsx\n+++ b/website/layouts/Base.tsx\n@@ -90,12 +90,21 @@ function SidebarItem({\n type SidebarNodeWrapper = {\n children: 
React.ReactNode,\n node: Sitemap,\n- elementRef: React.MutableRefObject<HTMLLIElement | null>;\n+ isActive: boolean;\n };\n \n-function SidebarNodeWrapper({ children, node, elementRef }: SidebarNodeWrapper) {\n+function SidebarNodeWrapper({ children, node, isActive }: SidebarNodeWrapper) {\n+ const { asPath } = useRouter();\n+ const nodeRef = useRef<HTMLLIElement | null>(null);\n+\n+ useEffect(() => {\n+ if (isActive) {\n+ nodeRef.current?.scrollIntoView({ behavior: 'smooth', block: 'nearest', inline: 'start' });\n+ }\n+ }, [asPath]);\n+\n if (node.resource?.label) {\n- return <li ref={elementRef}>{children}</li>;\n+ return <li ref={nodeRef}>{children}</li>;\n }\n \n return <>{children}</>;\n@@ -109,14 +118,12 @@ type SidebarNodeProps = {\n \n function SidebarNode({ node, level, isNodeActive }: SidebarNodeProps) {\n const { asPath } = useRouter();\n- const nodeWrapperRef = useRef<HTMLLIElement | null>(null);\n const isFirstLevel = level === 1;\n const initialIsExpanded = !isFirstLevel || hasActiveChild(node);\n const [isExpanded, setIsExpanded] = useState(initialIsExpanded);\n \n useEffect(() => {\n setIsExpanded(initialIsExpanded);\n- nodeWrapperRef.current?.scrollIntoView({ behavior: 'smooth', block: 'nearest', inline: 'start' });\n }, [asPath]);\n \n const id = node.resource?.label?.toLowerCase().replace(/\\s/g, '-');\n@@ -136,7 +143,7 @@ function SidebarNode({ node, level, isNodeActive }: SidebarNodeProps) {\n }\n \n return (\n- <SidebarNodeWrapper node={node} elementRef={nodeWrapperRef}>\n+ <SidebarNodeWrapper node={node} isActive={isNodeActive(node)}>\n <>\n {node.resource?.label ? (\n <SidebarItem\n", "diff --git a/.github/workflows/rust-cubestore-master.yml b/.github/workflows/rust-cubestore-master.yml\nindex 4a84984..bb07cd7 100644\n--- a/.github/workflows/rust-cubestore-master.yml\n+++ b/.github/workflows/rust-cubestore-master.yml\n@@ -115,9 +115,9 @@ jobs:\n if [[ $VERSION =~ ^v[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}$ ]]; then\n MINOR=${VERSION%.*}\n MAJOR=${MINOR%.*}\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR},${DOCKER_IMAGE}:latest\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR}\"\n elif [ \"${{ github.event_name }}\" = \"push\" ]; then\n- TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}\"\n+ TAGS=\"$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}${{ matrix.postfix }}\"\n fi\n \n echo ::set-output name=version::${VERSION}\n"]
5
["3346331a963766c8193170fb130adad2e658ada2", "88257ee720ed8ba136d49087c0d31373e8397dd5", "47df74d40becf915a9d89cdb887abd259b77def0", "4b5604063fcb8ff457bcb61fdbea85c6b3a5c620", "10bdcb452ff9d2b884d45a9c43a4b8a20fc4a883"]
["refactor", "docs", "test", "fix", "ci"]
template properties,add missing region to cloudformation_stack_set,build improvements,generate terminate end event compatible execution steps part 1 The random execution tests don't know the concept of flow scopes. This makes it challenging to generate a correct execution path for terminate end events, as they terminate a specific flow scope. Processing should continue as normal once the flow scope has been terminated. Whilst we don't have flow scopes, we do have execution path segments. These segments don't map 1 to 1 to flow scopes. However, since every flow scope starts a new segment, we can use these segments to get the desired behavior. Each segment must keep track of whether it has reached a terminate end event. If this is the case, it means that we don't expect any further execution steps. We can isolate this behavior in a single location, during the appending of one segment to another segment. In order to differentiate between flow scopes, a new append method has been added which takes the boolean `changesFlowScope` as a parameter. Block builders where the flow scope changes (e.g. SubProcessBlockBuilder) can use this to indicate that, even though a terminate end event has been reached, execution steps after this specific segment still need to be added to complete the process. When a segment is appended to a different segment and the flow scope does not change, we can use the segment that should be appended to identify whether new segments can still be added to the current segment. If the passed segment has reached a terminate end event and the flow scope has not changed, it is guaranteed that the current segment is in the same flow scope as the previous segment and thus has also reached the terminate end event.,switch QA to new testbench-1.x-prod In order to use the new Testbench that is compatible with Zeebe 1.x versions, this switches the client id and secrets used by the QA stage.
["diff --git a/docs/docs/segment-angular.md b/docs/docs/segment-angular.md\nindex b7ff7d8..c307239 100644\n--- a/docs/docs/segment-angular.md\n+++ b/docs/docs/segment-angular.md\n@@ -29,3 +29,17 @@ Display the currently active Angular CLI version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `angular.json` file is present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-azfunc.md b/docs/docs/segment-azfunc.md\nindex 6b4368a..984c0fb 100644\n--- a/docs/docs/segment-azfunc.md\n+++ b/docs/docs/segment-azfunc.md\n@@ -33,3 +33,17 @@ Display the currently active Azure functions CLI version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when a `host.json` or `local.settings.json` files is present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-crystal.md b/docs/docs/segment-crystal.md\nindex 9cf8ead..8f995bc 100644\n--- a/docs/docs/segment-crystal.md\n+++ b/docs/docs/segment-crystal.md\n@@ -32,3 +32,17 @@ Display the currently active crystal version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.cr` or `shard.yml` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+ properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-dart.md b/docs/docs/segment-dart.md\nindex ddfe247..9eb1d0e 100644\n--- a/docs/docs/segment-dart.md\n+++ b/docs/docs/segment-dart.md\n@@ -33,3 +33,17 @@ Display the currently active dart version.\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.dart`, `pubspec.yaml`, `pubspec.yml`, `pubspec.lock` files or the `.dart_tool`\n folder are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-dotnet.md b/docs/docs/segment-dotnet.md\nindex a8300c1..83bb0c2 100644\n--- a/docs/docs/segment-dotnet.md\n+++ b/docs/docs/segment-dotnet.md\n@@ -37,12 +37,13 @@ Display the currently active .NET SDK version.\n - unsupported_version_icon: `string` - text/icon that is displayed when the active .NET SDK version (e.g., one specified\n by `global.json`) is not installed/supported - defaults to `\\uf071` (X in a rectangle box)\n - template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n-properties below. Defaults does nothing(backward compatibility).\n+properties below. Defaults to `{{ .Full }}`\n - version_url_template: `string` - A go [text/template][go-text-template] template extended\n with [sprig][sprig] utilizing the properties below. Defaults does nothing(backward compatibility).\n \n ## Template Properties\n \n+- `.Full`: `string` - the full version\n - `.Major`: `string` - is the major version\n - `.Minor`: `string` - is the minor version\n - `.Patch`: `string` - is the patch version\ndiff --git a/docs/docs/segment-golang.md b/docs/docs/segment-golang.md\nindex 10321d3..7790269 100644\n--- a/docs/docs/segment-golang.md\n+++ b/docs/docs/segment-golang.md\n@@ -32,3 +32,14 @@ Display the currently active golang version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.go` or `go.mod` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\ndiff --git a/docs/docs/segment-java.md b/docs/docs/segment-java.md\nindex f4cc85d..c13c3e0 100644\n--- a/docs/docs/segment-java.md\n+++ b/docs/docs/segment-java.md\n@@ -45,3 +45,14 @@ Display the currently active java version.\n - `*.jar`\n - `*.clj`\n - `*.cljc`\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\ndiff --git a/docs/docs/segment-julia.md b/docs/docs/segment-julia.md\nindex 4b75608..3a4a0ec 100644\n--- a/docs/docs/segment-julia.md\n+++ b/docs/docs/segment-julia.md\n@@ -32,3 +32,17 @@ Display the currently active julia version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.jl` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-node.md b/docs/docs/segment-node.md\nindex 04d5963..ced7d23 100644\n--- a/docs/docs/segment-node.md\n+++ b/docs/docs/segment-node.md\n@@ -40,3 +40,17 @@ segment's background or foreground color\n - display_package_manager: `boolean` - show whether the current project uses Yarn or NPM - defaults to `false`\n - yarn_icon: `string` - the icon/text to display when using Yarn - defaults to ` \\uF61A`\n - npm_icon: `string` - the icon/text to display when using NPM - defaults to ` \\uE71E`\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-php.md b/docs/docs/segment-php.md\nindex a7b05aa..47b8ea4 100644\n--- a/docs/docs/segment-php.md\n+++ b/docs/docs/segment-php.md\n@@ -34,3 +34,17 @@ Display the currently active php version.\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.php, composer.json, composer.lock, .php-version` files are present (default)\n - enable_hyperlink: `bool` - display an hyperlink to the php release notes - defaults to `false`\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-python.md b/docs/docs/segment-python.md\nindex 80fa718..13bd1f8 100644\n--- a/docs/docs/segment-python.md\n+++ b/docs/docs/segment-python.md\n@@ -39,3 +39,17 @@ or not - defaults to `true`\n files are present (default)\n - `environment`: the segment is only displayed when a virtual env is present\n - `context`: the segment is only displayed when either `environment` or `files` is active\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-ruby.md b/docs/docs/segment-ruby.md\nindex e64fcf7..5d812f6 100644\n--- a/docs/docs/segment-ruby.md\n+++ b/docs/docs/segment-ruby.md\n@@ -32,3 +32,17 @@ Display the currently active ruby version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.rb`, `Gemfile` or `Rakefile` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. 
Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/docs/docs/segment-rust.md b/docs/docs/segment-rust.md\nindex 30c222d..c0f2a43 100644\n--- a/docs/docs/segment-rust.md\n+++ b/docs/docs/segment-rust.md\n@@ -32,3 +32,17 @@ Display the currently active rust version.\n - display_mode: `string` - determines when the segment is displayed\n - `always`: the segment is always displayed\n - `files`: the segment is only displayed when `*.rs`, `Cargo.toml` or `Cargo.lock` files are present (default)\n+- template: `string` - A go [text/template][go-text-template] template extended with [sprig][sprig] utilizing the\n+properties below. Defaults to `{{ .Full }}`\n+\n+## Template Properties\n+\n+- `.Full`: `string` - the full version\n+- `.Major`: `string` - is the major version\n+- `.Minor`: `string` - is the minor version\n+- `.Patch`: `string` - is the patch version\n+- `.Prerelease`: `string` - is the prerelease version\n+- `.BuildMetadata`: `string` - is the build metadata\n+\n+[go-text-template]: https://golang.org/pkg/text/template/\n+[sprig]: https://masterminds.github.io/sprig/\ndiff --git a/src/segment_language.go b/src/segment_language.go\nindex d9ced7b..2cfffa8 100644\n--- a/src/segment_language.go\n+++ b/src/segment_language.go\n@@ -97,7 +97,7 @@ func (l *language) string() string {\n \t\treturn \"\"\n \t}\n \n-\tsegmentTemplate := l.props.getString(SegmentTemplate, \"{{.Full}}\")\n+\tsegmentTemplate := l.props.getString(SegmentTemplate, \"{{ .Full }}\")\n \ttemplate := &textTemplate{\n \t\tTemplate: segmentTemplate,\n \t\tContext: l.version,\n", "diff --git a/internal/providers/terraform/aws/cloudformation_stack_set.go b/internal/providers/terraform/aws/cloudformation_stack_set.go\nindex 6720caa..e752b79 100644\n--- a/internal/providers/terraform/aws/cloudformation_stack_set.go\n+++ b/internal/providers/terraform/aws/cloudformation_stack_set.go\n@@ -12,7 +12,7 @@ func getCloudFormationStackSetRegistryItem() *schema.RegistryItem {\n \t}\n }\n func NewCloudformationStackSet(d *schema.ResourceData, u *schema.UsageData) *schema.Resource {\n-\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address)}\n+\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address), Region: strPtr(d.Get(\"region\").String())}\n \tif !d.IsEmpty(\"template_body\") {\n \t\tr.TemplateBody = strPtr(d.Get(\"template_body\").String())\n \t}\n", "diff --git a/.travis.yml b/.travis.yml\nindex 9e1b926..3144244 100644\n--- a/.travis.yml\n+++ b/.travis.yml\n@@ -1,5 +1,6 @@\n language: node_js\n dist: trusty\n+sudo: required\n node_js:\n - '6.9.5'\n before_install:\ndiff --git a/e2e/schematics/command-line.test.ts b/e2e/schematics/command-line.test.ts\nindex 16d8b34..ea91494 100644\n--- a/e2e/schematics/command-line.test.ts\n+++ b/e2e/schematics/command-line.test.ts\n@@ -68,8 +68,6 @@ describe('Command line', () => {\n \n updateFile('apps/myapp/src/app/app.component.spec.ts', `import '@nrwl/mylib';`);\n \n- updateRunAffectedToWorkInE2ESetup();\n-\n const affectedApps = runCommand('npm run affected:apps -- --files=\"libs/mylib/index.ts\"');\n expect(affectedApps).toContain('myapp');\n 
expect(affectedApps).not.toContain('myapp2');\n@@ -147,11 +145,3 @@ describe('Command line', () => {\n 1000000\n );\n });\n-\n-function updateRunAffectedToWorkInE2ESetup() {\n- const runAffected = readFile('node_modules/@nrwl/schematics/src/command-line/affected.js');\n- const newRunAffected = runAffected\n- .replace('ng build', '../../node_modules/.bin/ng build')\n- .replace('ng e2e', '../../node_modules/.bin/ng e2e');\n- updateFile('node_modules/@nrwl/schematics/src/command-line/affected.js', newRunAffected);\n-}\ndiff --git a/e2e/schematics/workspace.test.ts b/e2e/schematics/workspace.test.ts\nindex 8a41070..8749926 100644\n--- a/e2e/schematics/workspace.test.ts\n+++ b/e2e/schematics/workspace.test.ts\n@@ -82,7 +82,7 @@ describe('Nrwl Convert to Nx Workspace', () => {\n \n it('should generate a workspace and not change dependencies or devDependencies if they already exist', () => {\n // create a new AngularCLI app\n- runNgNew('--skip-install');\n+ runNgNew();\n const nxVersion = '0.0.0';\n const schematicsVersion = '0.0.0';\n const ngrxVersion = '0.0.0';\ndiff --git a/e2e/utils.ts b/e2e/utils.ts\nindex 422d866..a03104f 100644\n--- a/e2e/utils.ts\n+++ b/e2e/utils.ts\n@@ -17,8 +17,7 @@ export function newProject(): void {\n copyMissingPackages();\n execSync('mv ./tmp/proj ./tmp/proj_backup');\n }\n- execSync('cp -r ./tmp/proj_backup ./tmp/proj');\n- setUpSynLink();\n+ execSync('cp -a ./tmp/proj_backup ./tmp/proj');\n }\n \n export function copyMissingPackages(): void {\n@@ -26,14 +25,9 @@ export function copyMissingPackages(): void {\n modulesToCopy.forEach(m => copyNodeModule(projectName, m));\n }\n \n-export function setUpSynLink(): void {\n- execSync(`ln -s ../@nrwl/schematics/src/command-line/nx.js tmp/${projectName}/node_modules/.bin/nx`);\n- execSync(`chmod +x tmp/${projectName}/node_modules/.bin/nx`);\n-}\n-\n function copyNodeModule(path: string, name: string) {\n execSync(`rm -rf tmp/${path}/node_modules/${name}`);\n- execSync(`cp -r node_modules/${name} tmp/${path}/node_modules/${name}`);\n+ execSync(`cp -a node_modules/${name} tmp/${path}/node_modules/${name}`);\n }\n \n export function runCLI(\n@@ -43,7 +37,7 @@ export function runCLI(\n }\n ): string {\n try {\n- return execSync(`../../node_modules/.bin/ng ${command}`, {\n+ return execSync(`./node_modules/.bin/ng ${command}`, {\n cwd: `./tmp/${projectName}`\n })\n .toString()\n@@ -67,7 +61,7 @@ export function newLib(name: string): string {\n }\n \n export function runSchematic(command: string): string {\n- return execSync(`../../node_modules/.bin/schematics ${command}`, {\n+ return execSync(`./node_modules/.bin/schematics ${command}`, {\n cwd: `./tmp/${projectName}`\n }).toString();\n }\ndiff --git a/package.json b/package.json\nindex bef54f8..9186a58 100644\n--- a/package.json\n+++ b/package.json\n@@ -6,7 +6,7 @@\n \"private\": true,\n \"scripts\": {\n \"build\": \"./scripts/build.sh\",\n- \"e2e\": \"yarn build && ./scripts/e2e.sh\",\n+ \"e2e\": \"./scripts/e2e.sh\",\n \"format\": \"./scripts/format.sh\",\n \"linknpm\": \"./scripts/link.sh\",\n \"package\": \"./scripts/package.sh\",\n@@ -14,7 +14,7 @@\n \"copy\": \"./scripts/copy.sh\",\n \"test:schematics\": \"yarn build && ./scripts/test_schematics.sh\",\n \"test:nx\": \"yarn build && ./scripts/test_nx.sh\",\n- \"test\": \"yarn build && ./scripts/test_nx.sh && ./scripts/test_schematics.sh\",\n+ \"test\": \"yarn linknpm && ./scripts/test_nx.sh && ./scripts/test_schematics.sh\",\n \"checkformat\": \"./scripts/check-format.sh\",\n \"publish_npm\": \"./scripts/publish.sh\"\n 
},\ndiff --git a/packages/schematics/src/collection/workspace/index.ts b/packages/schematics/src/collection/workspace/index.ts\nindex 8f8897f..c70d161 100644\n--- a/packages/schematics/src/collection/workspace/index.ts\n+++ b/packages/schematics/src/collection/workspace/index.ts\n@@ -254,20 +254,7 @@ function moveFiles(options: Schema) {\n \n function copyAngularCliTgz() {\n return (host: Tree) => {\n- copyFile(\n- path.join(\n- 'node_modules',\n- '@nrwl',\n- 'schematics',\n- 'src',\n- 'collection',\n- 'application',\n- 'files',\n- '__directory__',\n- '.angular_cli.tgz'\n- ),\n- '.'\n- );\n+ copyFile(path.join(__dirname, '..', 'application', 'files', '__directory__', '.angular_cli.tgz'), '.');\n return host;\n };\n }\ndiff --git a/packages/schematics/src/command-line/affected.ts b/packages/schematics/src/command-line/affected.ts\nindex b7f9173..89a4f72 100644\n--- a/packages/schematics/src/command-line/affected.ts\n+++ b/packages/schematics/src/command-line/affected.ts\n@@ -1,5 +1,7 @@\n import { execSync } from 'child_process';\n import { getAffectedApps, parseFiles } from './shared';\n+import * as path from 'path';\n+import * as resolve from 'resolve';\n \n export function affected(args: string[]): void {\n const command = args[0];\n@@ -39,7 +41,7 @@ function build(apps: string[], rest: string[]) {\n if (apps.length > 0) {\n console.log(`Building ${apps.join(', ')}`);\n apps.forEach(app => {\n- execSync(`ng build ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n+ execSync(`${ngPath()} build ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n });\n } else {\n console.log('No apps to build');\n@@ -50,9 +52,13 @@ function e2e(apps: string[], rest: string[]) {\n if (apps.length > 0) {\n console.log(`Testing ${apps.join(', ')}`);\n apps.forEach(app => {\n- execSync(`ng e2e ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n+ execSync(`${ngPath()} e2e ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n });\n } else {\n- console.log('No apps to tst');\n+ console.log('No apps to test');\n }\n }\n+\n+function ngPath() {\n+ return `${path.dirname(path.dirname(path.dirname(resolve.sync('@angular/cli', { basedir: __dirname }))))}/bin/ng`;\n+}\ndiff --git a/scripts/build.sh b/scripts/build.sh\nindex ac533b5..9b8891b 100755\n--- a/scripts/build.sh\n+++ b/scripts/build.sh\n@@ -3,6 +3,8 @@\n rm -rf build\n ngc\n rsync -a --exclude=*.ts packages/ build/packages\n+chmod +x build/packages/schematics/bin/create-nx-workspace.js\n+chmod +x build/packages/schematics/src/command-line/nx.js\n rm -rf build/packages/install\n cp README.md build/packages/schematics\n cp README.md build/packages/nx\n\\ No newline at end of file\n", "diff --git a/test-util/src/main/java/io/camunda/zeebe/test/util/bpmn/random/ExecutionPathSegment.java b/test-util/src/main/java/io/camunda/zeebe/test/util/bpmn/random/ExecutionPathSegment.java\nindex da33c23..23c43be 100644\n--- a/test-util/src/main/java/io/camunda/zeebe/test/util/bpmn/random/ExecutionPathSegment.java\n+++ b/test-util/src/main/java/io/camunda/zeebe/test/util/bpmn/random/ExecutionPathSegment.java\n@@ -29,6 +29,10 @@ import org.apache.commons.lang3.builder.ToStringStyle;\n */\n public final class ExecutionPathSegment {\n \n+ // If we have reached a terminate end event we want to stop generating execution steps for a\n+ // specific flow scope. 
By setting this flag to true no new execution steps will be added for the\n+ // flow scope this segment is in.\n+ private boolean reachedTerminateEndEvent = false;\n private final List<ScheduledExecutionStep> scheduledSteps = new ArrayList<>();\n private final Map<String, Object> variableDefaults = new HashMap<>();\n \n@@ -87,10 +91,28 @@ public final class ExecutionPathSegment {\n new ScheduledExecutionStep(logicalPredecessor, executionPredecessor, executionStep));\n }\n \n+ /**\n+ * Appends the steps of the passed execution path segment to the current segment.\n+ *\n+ * @param pathToAdd execution path segment to append to this segment\n+ */\n public void append(final ExecutionPathSegment pathToAdd) {\n+ append(pathToAdd, false);\n+ }\n+\n+ /**\n+ * Appends the step of the passed execution path segment to the current segment if the current\n+ *\n+ * @param pathToAdd\n+ * @param changesFlowScope\n+ */\n+ public void append(final ExecutionPathSegment pathToAdd, final boolean changesFlowScope) {\n mergeVariableDefaults(pathToAdd);\n \n- pathToAdd.getScheduledSteps().forEach(this::append);\n+ if (!hasReachedTerminateEndEvent() || changesFlowScope) {\n+ pathToAdd.getScheduledSteps().forEach(this::append);\n+ }\n+ reachedTerminateEndEvent = pathToAdd.hasReachedTerminateEndEvent() && !changesFlowScope;\n }\n \n public void append(final ScheduledExecutionStep scheduledExecutionStep) {\n@@ -259,6 +281,14 @@ public final class ExecutionPathSegment {\n return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);\n }\n \n+ public boolean hasReachedTerminateEndEvent() {\n+ return reachedTerminateEndEvent;\n+ }\n+\n+ public void setReachedTerminateEndEvent(final boolean reachedTerminateEndEvent) {\n+ this.reachedTerminateEndEvent = reachedTerminateEndEvent;\n+ }\n+\n /**\n * An execution boundary is the point where automatic and non-automatic {@link\n * ScheduledExecutionStep}'s meet each other. This class contains information about the existing\n", "diff --git a/Jenkinsfile b/Jenkinsfile\nindex 176ab58..bead402 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -326,7 +326,7 @@ pipeline {\n TAG = \"${env.VERSION}-${env.GIT_COMMIT}\"\n DOCKER_GCR = credentials(\"zeebe-gcr-serviceaccount-json\")\n ZEEBE_AUTHORIZATION_SERVER_URL = 'https://login.cloud.ultrawombat.com/oauth/token'\n- ZEEBE_CLIENT_ID = 'W5a4JUc3I1NIetNnodo3YTvdsRIFb12w'\n+ ZEEBE_CLIENT_ID = 'ELL8eP0qDkl6dxXVps0t51x2VkCkWf~p'\n QA_RUN_VARIABLES = \"{\\\"zeebeImage\\\": \\\"${env.IMAGE}:${env.TAG}\\\", \\\"generationTemplate\\\": \\\"${params.GENERATION_TEMPLATE}\\\", \" +\n \"\\\"channel\\\": \\\"Internal Dev\\\", \\\"branch\\\": \\\"${env.BRANCH_NAME}\\\", \\\"build\\\": \\\"${currentBuild.absoluteUrl}\\\", \" +\n \"\\\"businessKey\\\": \\\"${currentBuild.absoluteUrl}\\\", \\\"processId\\\": \\\"qa-protocol\\\"}\"\n@@ -341,7 +341,7 @@ pipeline {\n withVault(\n [vaultSecrets:\n [\n- [path : 'secret/common/ci-zeebe/testbench-secrets-int',\n+ [path : 'secret/common/ci-zeebe/testbench-secrets-1.x-prod',\n secretValues:\n [\n [envVar: 'ZEEBE_CLIENT_SECRET', vaultKey: 'clientSecret'],\n"]
5
["3a4e21c36d76b4bea8dbb365d3c3bd005a7f3f8f", "304d0588f634e9e72087a706367c53af9c7f7180", "e0a977b2d316e7612b5d72cb02cd7d78e75dbc55", "40597fb4de41c7194eb99479a914db70da7909ea", "c81a0c2999454c859b4bf4da5779712960d239be"]
["docs", "fix", "build", "feat", "ci"]
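The "generate terminate end event compatible execution steps" commit message in the record above describes the append logic only in prose. Below is a minimal, self-contained sketch of that behavior, condensed from the `ExecutionPathSegment` diff in the same record; `ExecutionPathSegmentSketch` and the string steps are simplified stand-ins for the real test-util classes, not the actual implementation (assumes Java 10+).

```java
import java.util.ArrayList;
import java.util.List;

final class ExecutionPathSegmentSketch {

  private final List<String> scheduledSteps = new ArrayList<>();
  private boolean reachedTerminateEndEvent = false;

  void addStep(final String step) {
    scheduledSteps.add(step);
  }

  void markTerminateEndEventReached() {
    reachedTerminateEndEvent = true;
  }

  // Appending within the same flow scope: no flow scope change.
  void append(final ExecutionPathSegmentSketch pathToAdd) {
    append(pathToAdd, false);
  }

  void append(final ExecutionPathSegmentSketch pathToAdd, final boolean changesFlowScope) {
    // Steps are only appended if this segment's flow scope has not been
    // terminated yet, or if the appended segment starts a new flow scope.
    if (!reachedTerminateEndEvent || changesFlowScope) {
      scheduledSteps.addAll(pathToAdd.scheduledSteps);
    }
    // The terminate flag only propagates within the same flow scope; a flow
    // scope change resets it so later steps can still be added.
    reachedTerminateEndEvent = pathToAdd.reachedTerminateEndEvent && !changesFlowScope;
  }

  public static void main(final String[] args) {
    final var terminated = new ExecutionPathSegmentSketch();
    terminated.addStep("activate-task");
    terminated.markTerminateEndEventReached();

    final var parent = new ExecutionPathSegmentSketch();
    parent.append(terminated); // same flow scope: the terminate flag propagates

    final var after = new ExecutionPathSegmentSketch();
    after.addStep("continue-in-parent-scope");
    parent.append(after, true); // flow scope change: steps are still added

    System.out.println(parent.scheduledSteps); // [activate-task, continue-in-parent-scope]
  }
}
```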
add descriptions to buttons on hover,fixed start types for size and opacity,fix monorepo.dir prop Signed-off-by: Carlos Alexandro Becker <[email protected]>,make sure root is being watched and setRoot called when it changes,verify process can start at supported element types Verifies a PI can be started at specific element types. The test will deploy the process, start an instance at the desired start element and verify that it has been activated successfully.
["diff --git a/benchmarks/main.mjs b/benchmarks/main.mjs\nindex 0c2dc6b..e2f79d4 100644\n--- a/benchmarks/main.mjs\n+++ b/benchmarks/main.mjs\n@@ -65,8 +65,9 @@ const vnode = () =>\n },\n style: style({ margin: '5px' }),\n disabled,\n+ title: suite.name.split(' | ')[1],\n },\n- [suite.name],\n+ [suite.name.split(' | ')[0]],\n ),\n ),\n m(\ndiff --git a/benchmarks/suites/appendManyRowsToLargeTable.mjs b/benchmarks/suites/appendManyRowsToLargeTable.mjs\nindex e6a034e..7e34ca3 100644\n--- a/benchmarks/suites/appendManyRowsToLargeTable.mjs\n+++ b/benchmarks/suites/appendManyRowsToLargeTable.mjs\n@@ -31,7 +31,9 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('append many rows to large table');\n+const suite = new benchmark.Suite(\n+ 'append many rows to large table | appending 1,000 to a table of 10,000 rows.',\n+);\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/clearRows.mjs b/benchmarks/suites/clearRows.mjs\nindex ad47036..2a7711b 100644\n--- a/benchmarks/suites/clearRows.mjs\n+++ b/benchmarks/suites/clearRows.mjs\n@@ -27,7 +27,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(row);\n });\n \n-const suite = new benchmark.Suite('clear rows');\n+const suite = new benchmark.Suite('clear rows | clearing a table with 1,000 rows');\n \n const hoistedVNode = m('table', undefined, [], VFlags.NO_CHILDREN);\n \ndiff --git a/benchmarks/suites/createManyRows.mjs b/benchmarks/suites/createManyRows.mjs\nindex 578f511..96c7b02 100644\n--- a/benchmarks/suites/createManyRows.mjs\n+++ b/benchmarks/suites/createManyRows.mjs\n@@ -7,7 +7,7 @@ import benchmark from '../benchmark';\n import { m, patch } from '../../src/index';\n import { buildData } from '../data';\n \n-const suite = new benchmark.Suite('create many rows');\n+const suite = new benchmark.Suite('create many rows | creating 10,000 rows');\n \n const hoistedVNode = m(\n 'div',\ndiff --git a/benchmarks/suites/createRows.mjs b/benchmarks/suites/createRows.mjs\nindex bfcc876..4d9ff57 100644\n--- a/benchmarks/suites/createRows.mjs\n+++ b/benchmarks/suites/createRows.mjs\n@@ -7,7 +7,7 @@ import benchmark from '../benchmark';\n import { m, patch } from '../../src/index';\n import { buildData } from '../data';\n \n-const suite = new benchmark.Suite('create rows');\n+const suite = new benchmark.Suite('create rows | creating 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/partialUpdate.mjs b/benchmarks/suites/partialUpdate.mjs\nindex 55948a9..c5f1de3 100644\n--- a/benchmarks/suites/partialUpdate.mjs\n+++ b/benchmarks/suites/partialUpdate.mjs\n@@ -34,7 +34,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('partial update');\n+const suite = new benchmark.Suite('partial update | updating every 10th row for 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/removeRow.mjs b/benchmarks/suites/removeRow.mjs\nindex aeb1e9a..31c7599 100644\n--- a/benchmarks/suites/removeRow.mjs\n+++ b/benchmarks/suites/removeRow.mjs\n@@ -30,7 +30,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('remove row');\n+const suite = new benchmark.Suite('remove row | removing one row');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/replaceAllRows.mjs b/benchmarks/suites/replaceAllRows.mjs\nindex 9555ae4..7001667 100644\n--- a/benchmarks/suites/replaceAllRows.mjs\n+++ 
b/benchmarks/suites/replaceAllRows.mjs\n@@ -41,7 +41,7 @@ data2.forEach(({ id, label }) => {\n \n shuffleArray(data2);\n \n-const suite = new benchmark.Suite('replace all rows');\n+const suite = new benchmark.Suite('replace all rows | updating all 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/selectRow.mjs b/benchmarks/suites/selectRow.mjs\nindex 76be216..de69359 100644\n--- a/benchmarks/suites/selectRow.mjs\n+++ b/benchmarks/suites/selectRow.mjs\n@@ -30,7 +30,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('select row');\n+const suite = new benchmark.Suite('select row | highlighting a selected row');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/swapRows.mjs b/benchmarks/suites/swapRows.mjs\nindex 2a91e74..ce52036 100644\n--- a/benchmarks/suites/swapRows.mjs\n+++ b/benchmarks/suites/swapRows.mjs\n@@ -36,7 +36,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('swap rows');\n+const suite = new benchmark.Suite('swap rows | swap 2 rows for table with 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\n", "diff --git a/core/main/src/Core/Particle.ts b/core/main/src/Core/Particle.ts\nindex 1aa6fba..6ea6ffc 100644\n--- a/core/main/src/Core/Particle.ts\n+++ b/core/main/src/Core/Particle.ts\n@@ -271,7 +271,7 @@ export class Particle implements IParticle {\n }\n }\n \n- const sizeAnimation = this.options.size.animation;\n+ const sizeAnimation = sizeOptions.animation;\n \n if (sizeAnimation.enable) {\n this.size.status = AnimationStatus.increasing;\n@@ -279,7 +279,8 @@ export class Particle implements IParticle {\n if (!randomSize) {\n switch (sizeAnimation.startValue) {\n case StartValueType.min:\n- this.size.value = sizeAnimation.minimumValue * pxRatio;\n+ this.size.value = NumberUtils.getRangeMin(sizeOptions.value) * pxRatio;\n+ this.size.status = AnimationStatus.increasing;\n \n break;\n \n@@ -287,11 +288,14 @@ export class Particle implements IParticle {\n this.size.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(sizeAnimation.minimumValue * pxRatio, this.size.value)\n );\n+ this.size.status =\n+ Math.random() >= 0.5 ? AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.size.value = NumberUtils.getRangeMax(sizeOptions.value) * pxRatio;\n this.size.status = AnimationStatus.decreasing;\n \n break;\n@@ -393,7 +397,8 @@ export class Particle implements IParticle {\n if (!randomOpacity) {\n switch (opacityAnimation.startValue) {\n case StartValueType.min:\n- this.opacity.value = opacityAnimation.minimumValue;\n+ this.opacity.value = NumberUtils.getRangeMin(this.opacity.value);\n+ this.opacity.status = AnimationStatus.increasing;\n \n break;\n \n@@ -401,11 +406,14 @@ export class Particle implements IParticle {\n this.opacity.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(opacityAnimation.minimumValue, this.opacity.value)\n );\n+ this.opacity.status =\n+ Math.random() >= 0.5 ? 
AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.opacity.value = NumberUtils.getRangeMax(this.opacity.value);\n this.opacity.status = AnimationStatus.decreasing;\n \n break;\ndiff --git a/presets/confetti/src/options.ts b/presets/confetti/src/options.ts\nindex 7fc6225..a713425 100644\n--- a/presets/confetti/src/options.ts\n+++ b/presets/confetti/src/options.ts\n@@ -28,7 +28,7 @@ export const loadOptions = (confettiOptions: RecursivePartial<IConfettiOptions>)\n animation: {\n enable: true,\n minimumValue: 0,\n- speed: 2,\n+ speed: 0.5,\n startValue: \"max\",\n destroy: \"min\",\n },\n", "diff --git a/www/docs/customization/monorepo.md b/www/docs/customization/monorepo.md\nindex 6d0e857..e45490f 100644\n--- a/www/docs/customization/monorepo.md\n+++ b/www/docs/customization/monorepo.md\n@@ -18,7 +18,7 @@ project_name: subproj1\n \n monorepo:\n tag_prefix: subproject1/\n- folder: subproj1\n+ dir: subproj1\n ```\n \n Then, you can release with (from the project's root directory):\n@@ -30,11 +30,11 @@ goreleaser release --rm-dist -f ./subproj1/.goreleaser.yml\n Then, the following is different from a \"regular\" run:\n \n - GoReleaser will then look if current commit has a tag prefixed with `subproject1`, and also the previous tag with the same prefix;\n-- Changelog will include only commits that contain changes to files within the `subproj1` folder;\n+- Changelog will include only commits that contain changes to files within the `subproj1` directory;\n - Release name gets prefixed with `{{ .ProjectName }} ` if empty;\n-- All build's `dir` setting get set to `monorepo.folder` if empty;\n+- All build's `dir` setting get set to `monorepo.dir` if empty;\n - if yours is not, you might want to change that manually;\n-- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.folder`;\n+- Extra files on the release, archives, Docker builds, etc are prefixed with `monorepo.dir`;\n - On templates, `{{.PrefixedTag}}` will be `monorepo.prefix/tag` (aka the actual tag name), and `{{.Tag}}` has the prefix stripped;\n \n The rest of the release process should work as usual.\n", "diff --git a/packages/core/src/components/nav/nav.tsx b/packages/core/src/components/nav/nav.tsx\nindex 5aaacb6..27241ee 100644\n--- a/packages/core/src/components/nav/nav.tsx\n+++ b/packages/core/src/components/nav/nav.tsx\n@@ -1,4 +1,4 @@\n-import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n+import { Component, Element, Event, EventEmitter, Listen, Method, Prop, Watch } from '@stencil/core';\n import {\n Animation,\n AnimationController,\n@@ -103,10 +103,19 @@ export class Nav implements PublicNav, NavContainer {\n }\n this.init = true;\n if (!this.useRouter) {\n+ console.log('componentDidLoadImpl: ', this.root);\n componentDidLoadImpl(this);\n }\n }\n \n+ @Watch('root')\n+ updateRootComponent(): any {\n+ console.log('updateRootComponent: ', this.root);\n+ if (this.init) {\n+ return this.setRoot(this.root);\n+ }\n+ }\n+\n getViews(): PublicViewController[] {\n return getViews(this);\n }\ndiff --git a/packages/core/src/components/nav/test/set-root/index.html b/packages/core/src/components/nav/test/set-root/index.html\nnew file mode 100644\nindex 0000000..823c9ed\n--- /dev/null\n+++ b/packages/core/src/components/nav/test/set-root/index.html\n@@ -0,0 +1,110 @@\n+<!DOCTYPE html>\n+<html dir=\"ltr\">\n+<head>\n+ <meta charset=\"UTF-8\">\n+ <title>Nav</title>\n+ <meta name=\"viewport\" 
content=\"width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no\">\n+ <script src=\"/dist/ionic.js\"></script>\n+</head>\n+<body onload=\"initiaize()\">\n+ <ion-app>\n+ <ion-nav root=\"page-one\"></ion-nav>\n+ </ion-app>\n+</body>\n+\n+<script>\n+\n+ class PageOne extends HTMLElement {\n+ async connectedCallback() {\n+ this.innerHTML = `\n+ <ion-page>\n+ <ion-header>\n+ <ion-toolbar>\n+ <ion-title>Page One</ion-title>\n+ </ion-toolbar>\n+ </ion-header>\n+ <ion-content padding>\n+ <h1>Page One</h1>\n+ <ion-button class=\"next\">Go to Page Two</ion-button>\n+ </ion-content>\n+ </ion-page>`;\n+\n+ const button = this.querySelector('ion-button');\n+ button.addEventListener('click', async () => {\n+ this.closest('ion-nav').push('page-two');\n+ });\n+ }\n+ }\n+\n+ class PageTwo extends HTMLElement {\n+ async connectedCallback() {\n+ this.innerHTML = `\n+ <ion-page>\n+ <ion-header>\n+ <ion-toolbar>\n+ <ion-title>Page Two</ion-title>\n+ </ion-toolbar>\n+ </ion-header>\n+ <ion-content padding>\n+ <h1>Page Two</h1>\n+ <ion-button class=\"next\">Go to Page Three</ion-button>\n+ <ion-button class=\"previous\">Go Back</ion-button>\n+ </ion-content>\n+ </ion-page>`;\n+\n+ const previousButton = this.querySelector('ion-button.previous');\n+ previousButton.addEventListener('click', async () => {\n+ await this.closest('ion-nav').pop();\n+ });\n+\n+ const nextButton = this.querySelector('ion-button.next');\n+ nextButton.addEventListener('click', async () => {\n+ await this.closest('ion-nav').push('page-three');\n+ });\n+ }\n+ }\n+\n+ class PageThree extends HTMLElement {\n+ async connectedCallback() {\n+ this.innerHTML = `\n+ <ion-page>\n+ <ion-header>\n+ <ion-toolbar>\n+ <ion-title>Page Three</ion-title>\n+ </ion-toolbar>\n+ </ion-header>\n+ <ion-content padding>\n+ <h1>Page Three</h1>\n+ <ion-button class=\"previous\">Go Back</ion-button>\n+ </ion-content>\n+ </ion-page>`;\n+\n+ const previousButton = this.querySelector('ion-button.previous');\n+ previousButton.addEventListener('click', async () => {\n+ await this.closest('ion-nav').pop();\n+ });\n+ }\n+ }\n+\n+ customElements.define('page-one', PageOne);\n+ customElements.define('page-two', PageTwo);\n+ customElements.define('page-three', PageThree);\n+\n+ async function initiaize() {\n+ const nav = document.querySelector('ion-nav');\n+ await nav.componentOnReady();\n+ nav.root = 'page-one';\n+\n+ setInterval(() => {\n+ if (nav.root === 'page-one') {\n+ nav.root = 'page-two';\n+ } else if ( nav.root === 'page-two') {\n+ nav.root = 'page-three';\n+ } else {\n+ nav.root = 'page-one';\n+ }\n+ }, 1000);\n+ }\n+\n+</script>\n+</html>\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\nnew file mode 100644\nindex 0000000..a505307\n--- /dev/null\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/processinstance/CreateProcessInstanceSupportedElementTest.java\n@@ -0,0 +1,233 @@\n+/*\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\n+ * one or more contributor license agreements. See the NOTICE file distributed\n+ * with this work for additional information regarding copyright ownership.\n+ * Licensed under the Zeebe Community License 1.1. 
You may not use this file\n+ * except in compliance with the Zeebe Community License 1.1.\n+ */\n+package io.camunda.zeebe.engine.processing.processinstance;\n+\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.groups.Tuple.tuple;\n+\n+import io.camunda.zeebe.engine.util.EngineRule;\n+import io.camunda.zeebe.model.bpmn.Bpmn;\n+import io.camunda.zeebe.model.bpmn.BpmnModelInstance;\n+import io.camunda.zeebe.protocol.record.Record;\n+import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n+import io.camunda.zeebe.protocol.record.value.BpmnElementType;\n+import io.camunda.zeebe.test.util.record.RecordingExporter;\n+import io.camunda.zeebe.test.util.record.RecordingExporterTestWatcher;\n+import java.util.Collection;\n+import java.util.Collections;\n+import java.util.List;\n+import java.util.Map;\n+import org.junit.ClassRule;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.runner.RunWith;\n+import org.junit.runners.Parameterized;\n+import org.junit.runners.Parameterized.Parameters;\n+\n+@RunWith(Parameterized.class)\n+public class CreateProcessInstanceSupportedElementTest {\n+\n+ @ClassRule public static final EngineRule ENGINE = EngineRule.singlePartition();\n+ private static final String PROCESS_ID = \"processId\";\n+ private static final String CHILD_PROCESS_ID = \"childProcessId\";\n+ private static final String START_ELEMENT_ID = \"startElement\";\n+ private static final String MESSAGE = \"message\";\n+ private static final String JOBTYPE = \"jobtype\";\n+\n+ @Rule\n+ public final RecordingExporterTestWatcher recordingExporterTestWatcher =\n+ new RecordingExporterTestWatcher();\n+\n+ private final Scenario scenario;\n+\n+ public CreateProcessInstanceSupportedElementTest(final Scenario scenario) {\n+ this.scenario = scenario;\n+ }\n+\n+ @Parameters(name = \"{0}\")\n+ public static Collection<Object> scenarios() {\n+ return List.of(\n+ new Scenario(\n+ BpmnElementType.SUB_PROCESS,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .subProcess(START_ELEMENT_ID)\n+ .embeddedSubProcess()\n+ .startEvent()\n+ .subProcessDone()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_SUB_PROCESS,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .eventSubProcess(\n+ START_ELEMENT_ID, e -> e.startEvent().timerWithDuration(\"PT1H\").endEvent())\n+ .startEvent()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_CATCH_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(START_ELEMENT_ID)\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.INTERMEDIATE_THROW_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateThrowEvent(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.END_EVENT,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().endEvent(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SERVICE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.RECEIVE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .receiveTask(START_ELEMENT_ID)\n+ .message(b -> 
b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.USER_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID).startEvent().userTask(START_ELEMENT_ID).done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.MANUAL_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .manualTask(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EXCLUSIVE_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .exclusiveGateway(START_ELEMENT_ID)\n+ .defaultFlow()\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.PARALLEL_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .parallelGateway(START_ELEMENT_ID)\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.EVENT_BASED_GATEWAY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .eventBasedGateway(START_ELEMENT_ID)\n+ .intermediateCatchEvent()\n+ .message(b -> b.name(MESSAGE).zeebeCorrelationKeyExpression(\"correlationKey\"))\n+ .moveToLastGateway()\n+ .intermediateCatchEvent()\n+ .timerWithDuration(\"PT1H\")\n+ .done(),\n+ Map.of(\"correlationKey\", \"value\")),\n+ new Scenario(\n+ BpmnElementType.MULTI_INSTANCE_BODY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .serviceTask(\n+ START_ELEMENT_ID,\n+ t ->\n+ t.zeebeJobType(JOBTYPE)\n+ .multiInstance(m -> m.parallel().zeebeInputCollectionExpression(\"[1]\")))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.CALL_ACTIVITY,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .callActivity(START_ELEMENT_ID, c -> c.zeebeProcessId(CHILD_PROCESS_ID))\n+ .endEvent()\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.BUSINESS_RULE_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .businessRuleTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SCRIPT_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .scriptTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()),\n+ new Scenario(\n+ BpmnElementType.SEND_TASK,\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .sendTask(START_ELEMENT_ID, b -> b.zeebeJobType(JOBTYPE))\n+ .done(),\n+ Collections.emptyMap()));\n+ }\n+\n+ @Test\n+ public void testProcessInstanceCanStartAtElementType() {\n+ // given\n+ ENGINE.deployment().withXmlResource(scenario.modelInstance).deploy();\n+ if (scenario.type == BpmnElementType.CALL_ACTIVITY) {\n+ ENGINE.deployment().withXmlResource(getChildProcess()).deploy();\n+ }\n+\n+ // when\n+ final long instanceKey =\n+ ENGINE\n+ .processInstance()\n+ .ofBpmnProcessId(PROCESS_ID)\n+ .withStartInstruction(START_ELEMENT_ID)\n+ .withVariables(scenario.variables)\n+ .create();\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.processInstanceRecords()\n+ .withProcessInstanceKey(instanceKey)\n+ .onlyEvents()\n+ .limit(\n+ r ->\n+ r.getValue().getBpmnElementType() == scenario.type\n+ && r.getIntent() == ProcessInstanceIntent.ELEMENT_ACTIVATED))\n+ .extracting(record -> record.getValue().getBpmnElementType(), Record::getIntent)\n+ .containsSequence(\n+ tuple(BpmnElementType.PROCESS, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(BpmnElementType.PROCESS, 
ProcessInstanceIntent.ELEMENT_ACTIVATED),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATING),\n+ tuple(scenario.type, ProcessInstanceIntent.ELEMENT_ACTIVATED));\n+ }\n+\n+ private BpmnModelInstance getChildProcess() {\n+ return Bpmn.createExecutableProcess(CHILD_PROCESS_ID).startEvent().endEvent().done();\n+ }\n+\n+ record Scenario(\n+ BpmnElementType type, BpmnModelInstance modelInstance, Map<String, Object> variables) {}\n+}\n"]
5
["d8d0ba8ea17ed43a04f90213851d2f27056d8cf0", "06960183db42cba1b1f1a8077660ba8c801c9e18", "9ed3c0c4a72af977fc9150512fb6538f20a94b22", "4be836f5655fb5356fde5ddd7437125f8574705d", "a5ecfdf49b0d4c43fbbbf7947be7c0327ccb3415"]
["feat", "fix", "docs", "refactor", "test"]
run nix macos jobs on macos-13 to try and avoid SIP,rebuild when environment variables change (#11471),implement array flatten support,convert to record,add system get version info Fiddle example (#20536)
["diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml\nnew file mode 100644\nindex 0000000..5be7d17\n--- /dev/null\n+++ b/.github/actionlint.yaml\n@@ -0,0 +1,7 @@\n+self-hosted-runner:\n+ # Labels of self-hosted runner in array of strings.\n+ labels: [macos-13]\n+# Configuration variables in array of strings defined in your repository or\n+# organization. `null` means disabling configuration variables check.\n+# Empty array means no configuration variable is allowed.\n+config-variables: null\ndiff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml\nindex e37346c..dce77e1 100644\n--- a/.github/workflows/nix.yml\n+++ b/.github/workflows/nix.yml\n@@ -37,7 +37,7 @@ jobs:\n - \"3.10\"\n - \"3.11\"\n include:\n- - os: macos-latest\n+ - os: macos-13\n python-version: \"3.10\"\n steps:\n - name: checkout\ndiff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\nindex 005a850..8db22e2 100644\n--- a/.pre-commit-config.yaml\n+++ b/.pre-commit-config.yaml\n@@ -3,7 +3,7 @@ ci:\n autofix_prs: false\n autoupdate_commit_msg: \"chore(deps): pre-commit.ci autoupdate\"\n skip:\n- - actionlint\n+ - actionlint-system\n - deadnix\n - just\n - nixpkgs-fmt\n@@ -17,9 +17,9 @@ default_stages:\n - commit\n repos:\n - repo: https://github.com/rhysd/actionlint\n- rev: v1.6.24\n+ rev: v1.6.25\n hooks:\n- - id: actionlint\n+ - id: actionlint-system\n - repo: https://github.com/psf/black\n rev: 23.3.0\n hooks:\n@@ -30,7 +30,7 @@ repos:\n - id: nbstripout\n exclude: .+/rendered/.+\n - repo: https://github.com/codespell-project/codespell\n- rev: v2.2.4\n+ rev: v2.2.5\n hooks:\n - id: codespell\n additional_dependencies:\n", "diff --git a/cli/build.rs b/cli/build.rs\nindex 548fbb5..d7bed21 100644\n--- a/cli/build.rs\n+++ b/cli/build.rs\n@@ -269,8 +269,17 @@ fn main() {\n // To debug snapshot issues uncomment:\n // op_fetch_asset::trace_serializer();\n \n- println!(\"cargo:rustc-env=TS_VERSION={}\", ts_version());\n+ if let Ok(c) = env::var(\"DENO_CANARY\") {\n+ println!(\"cargo:rustc-env=DENO_CANARY={}\", c);\n+ }\n+ println!(\"cargo:rerun-if-env-changed=DENO_CANARY\");\n+\n println!(\"cargo:rustc-env=GIT_COMMIT_HASH={}\", git_commit_hash());\n+ println!(\"cargo:rerun-if-env-changed=GIT_COMMIT_HASH\");\n+\n+ println!(\"cargo:rustc-env=TS_VERSION={}\", ts_version());\n+ println!(\"cargo:rerun-if-env-changed=TS_VERSION\");\n+\n println!(\n \"cargo:rustc-env=DENO_CONSOLE_LIB_PATH={}\",\n deno_console::get_declaration().display()\n@@ -322,9 +331,6 @@ fn main() {\n \n println!(\"cargo:rustc-env=TARGET={}\", env::var(\"TARGET\").unwrap());\n println!(\"cargo:rustc-env=PROFILE={}\", env::var(\"PROFILE\").unwrap());\n- if let Ok(c) = env::var(\"DENO_CANARY\") {\n- println!(\"cargo:rustc-env=DENO_CANARY={}\", c);\n- }\n \n let c = PathBuf::from(env::var_os(\"CARGO_MANIFEST_DIR\").unwrap());\n let o = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n", "diff --git a/ibis/backends/snowflake/registry.py b/ibis/backends/snowflake/registry.py\nindex 2373dd7..4ce03b0 100644\n--- a/ibis/backends/snowflake/registry.py\n+++ b/ibis/backends/snowflake/registry.py\n@@ -422,6 +422,7 @@ operation_registry.update(\n ops.ArrayZip: _array_zip,\n ops.ArraySort: unary(sa.func.array_sort),\n ops.ArrayRepeat: fixed_arity(sa.func.ibis_udfs.public.array_repeat, 2),\n+ ops.ArrayFlatten: fixed_arity(sa.func.array_flatten, 1),\n ops.StringSplit: fixed_arity(sa.func.split, 2),\n # snowflake typeof only accepts VARIANT, so we cast\n ops.TypeOf: unary(lambda arg: sa.func.typeof(sa.func.to_variant(arg))),\n", "diff --git 
a/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java b/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\nindex cc998c6..65c8550 100755\n--- a/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/exporter/stream/ExporterDirectorDistributionTest.java\n@@ -167,13 +167,8 @@ public final class ExporterDirectorDistributionTest {\n * <p>This makes sure that even if we miss one export position event, we distribute the event\n * later again, which makes tests less flaky.\n */\n- private static final class ClockShifter implements ConditionEvaluationListener<Void> {\n-\n- private final ControlledActorClock clock;\n-\n- public ClockShifter(final ControlledActorClock clock) {\n- this.clock = clock;\n- }\n+ private record ClockShifter(ControlledActorClock clock)\n+ implements ConditionEvaluationListener<Void> {\n \n @Override\n public void conditionEvaluated(final EvaluatedCondition<Void> condition) {\n", "diff --git a/docs/fiddles/system/system-information/get-version-information/index.html b/docs/fiddles/system/system-information/get-version-information/index.html\nnew file mode 100644\nindex 0000000..0867bc3\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/index.html\n@@ -0,0 +1,26 @@\n+<!DOCTYPE html>\n+<html>\n+ <head>\n+ <meta charset=\"UTF-8\">\n+ </head>\n+ <body>\n+ <div>\n+ <div>\n+ <h1>Get version information</h1>\n+ <i>Supports: Win, macOS, Linux <span>|</span> Process: Both</i>\n+ <div>\n+ <div>\n+ <button id=\"version-info\">View Demo</button>\n+ <span id=\"got-version-info\"></span>\n+ </div>\n+ <p>The <code>process</code> module is built into Node.js (therefore you can use this in both the main and renderer processes) and in Electron apps this object has a few more useful properties on it.</p>\n+ <p>The example below gets the version of Electron in use by the app.</p>\n+ <p>See the <a href=\"http://electron.atom.io/docs/api/process\">process documentation <span>(opens in new window)</span></a> for more.</p>\n+ </div>\n+ </div>\n+ </div>\n+ </body>\n+ <script>\n+ require('./renderer.js')\n+ </script>\n+</html>\ndiff --git a/docs/fiddles/system/system-information/get-version-information/main.js b/docs/fiddles/system/system-information/get-version-information/main.js\nnew file mode 100644\nindex 0000000..1f9f917\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/main.js\n@@ -0,0 +1,25 @@\n+const { app, BrowserWindow } = require('electron')\n+\n+let mainWindow = null\n+\n+function createWindow () {\n+ const windowOptions = {\n+ width: 600,\n+ height: 400,\n+ title: 'Get version information',\n+ webPreferences: {\n+ nodeIntegration: true\n+ }\n+ }\n+\n+ mainWindow = new BrowserWindow(windowOptions)\n+ mainWindow.loadFile('index.html')\n+\n+ mainWindow.on('closed', () => {\n+ mainWindow = null\n+ })\n+}\n+\n+app.on('ready', () => {\n+ createWindow()\n+})\ndiff --git a/docs/fiddles/system/system-information/get-version-information/renderer.js b/docs/fiddles/system/system-information/get-version-information/renderer.js\nnew file mode 100644\nindex 0000000..40f7f2c\n--- /dev/null\n+++ b/docs/fiddles/system/system-information/get-version-information/renderer.js\n@@ -0,0 +1,8 @@\n+const versionInfoBtn = document.getElementById('version-info')\n+\n+const electronVersion = 
process.versions.electron\n+\n+versionInfoBtn.addEventListener('click', () => {\n+ const message = `This app is using Electron version: ${electronVersion}`\n+ document.getElementById('got-version-info').innerHTML = message\n+})\n"]
5
["54cb6d4643b4a072ff997592a7fa14a69a6c068d", "63546c15bfb1284ac6d956eee274e6d7cf263a8f", "d3c754f09502be979e5dcc79f968b15052590bd0", "3346331a963766c8193170fb130adad2e658ada2", "16d4ace80096557fb3fd48396aa09107241c3131"]
["ci", "build", "feat", "refactor", "docs"]
fetch git history,update version (nightly.0),use new freespace config for disk space recovery test,use a closure,allow users to share their playground session
["diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex 6726e35..9114eeb 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -172,6 +172,8 @@ jobs:\n \n steps:\n - uses: actions/checkout@v3\n+ with:\n+ fetch-depth: 0\n - run: corepack enable\n - uses: actions/setup-node@v3\n with:\ndiff --git a/scripts/bump-edge.ts b/scripts/bump-edge.ts\nindex aa33d76..1d1cbc9 100644\n--- a/scripts/bump-edge.ts\n+++ b/scripts/bump-edge.ts\n@@ -18,7 +18,7 @@ async function main () {\n \n const config = await loadChangelogConfig(process.cwd())\n \n- const latestTag = execaSync('git', ['describe', '--tags', '--abbrev=0', 'main']).stdout\n+ const latestTag = execaSync('git', ['describe', '--tags', '--abbrev=0']).stdout\n \n const commits = await getGitDiff(latestTag)\n const bumpType = determineSemverChange(parseCommits(commits, config), config)\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex e6f659c..cf93556 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -126,7 +126,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -134,7 +134,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex c58299b..6e51b6e 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.11\"\n+version = \"0.6.12-nightly.0\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full-repl = [\"erg_common/full-repl\"]\n full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.11\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.11\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.11\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.23\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.12-nightly.0\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.24-nightly.0\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 5f005a1..e1a9964 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.23\"\n+version = \"0.1.24-nightly.0\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n", "diff --git 
a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\nindex 0854323..bfc7b7e 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryIT.java\n@@ -47,7 +47,8 @@ final class DiskSpaceRecoveryIT {\n .withZeebeData(volume)\n .withEnv(\"ZEEBE_BROKER_DATA_LOGSEGMENTSIZE\", \"1MB\")\n .withEnv(\"ZEEBE_BROKER_NETWORK_MAXMESSAGESIZE\", \"1MB\")\n- .withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.5\");\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"10MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"1MB\");\n \n private ZeebeClient client;\n \n@@ -127,7 +128,9 @@ final class DiskSpaceRecoveryIT {\n ContainerEngine.builder()\n .withDebugReceiverPort(SocketUtil.getNextAddress().getPort())\n .withContainer(\n- container.withEnv(\"ZEEBE_BROKER_DATA_DISKUSAGECOMMANDWATERMARK\", \"0.0001\"))\n+ container\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_PROCESSING\", \"16MB\")\n+ .withEnv(\"ZEEBE_BROKER_DATA_DISK_FREESPACE_REPLICATION\", \"10MB\"))\n .build();\n \n @BeforeEach\n", "diff --git a/ibis/expr/analysis.py b/ibis/expr/analysis.py\nindex bb17a7a..975c658 100644\n--- a/ibis/expr/analysis.py\n+++ b/ibis/expr/analysis.py\n@@ -39,7 +39,9 @@ def sub_for(expr, substitutions):\n An Ibis expression\n \"\"\"\n \n- def fn(node, mapping={k.op(): v for k, v in substitutions}):\n+ mapping = {k.op(): v for k, v in substitutions}\n+\n+ def fn(node):\n try:\n return mapping[node]\n except KeyError:\n", "diff --git a/playground/docker-compose.yml b/playground/docker-compose.yml\nnew file mode 100644\nindex 0000000..b8ac6aa\n--- /dev/null\n+++ b/playground/docker-compose.yml\n@@ -0,0 +1,18 @@\n+version: '3.3'\n+\n+services:\n+ db:\n+ container_name: panda-mysql\n+ image: mariadb:10.7.1-focal\n+ restart: always\n+ ports:\n+ - 3310:3306\n+ environment:\n+ MARIADB_ROOT_PASSWORD: root\n+ MARIADB_DATABASE: panda\n+ volumes:\n+ - panda-mysql:/var/lib/mysql\n+\n+volumes:\n+ panda-mysql:\n+ driver: local\ndiff --git a/playground/package.json b/playground/package.json\nindex eab6f62..0feccbb 100644\n--- a/playground/package.json\n+++ b/playground/package.json\n@@ -9,6 +9,9 @@\n \"start\": \"next start\",\n \"lint\": \"next lint\",\n \"dev\": \"next dev\",\n+ \"db:start\": \"docker-compose up -d\",\n+ \"db:stop\": \"docker-compose down\",\n+ \"db:push\": \"prisma db push --skip-generate\",\n \"db:generate\": \"prisma generate\",\n \"db:reset\": \"prisma migrate reset\",\n \"db:studio\": \"prisma studio\"\ndiff --git a/playground/prisma/dev.db b/playground/prisma/dev.db\ndeleted file mode 100644\nindex aa8281f..0000000\nBinary files a/playground/prisma/dev.db and /dev/null differ\ndiff --git a/playground/prisma/migrations/20230204163131_init/migration.sql b/playground/prisma/migrations/20230204163131_init/migration.sql\ndeleted file mode 100644\nindex b3c34f7..0000000\n--- a/playground/prisma/migrations/20230204163131_init/migration.sql\n+++ /dev/null\n@@ -1,8 +0,0 @@\n--- CreateTable\n-CREATE TABLE \"Session\" (\n- \"id\" TEXT NOT NULL PRIMARY KEY,\n- \"code\" TEXT NOT NULL,\n- \"config\" TEXT NOT NULL,\n- \"view\" TEXT NOT NULL DEFAULT 'code',\n- \"createdAt\" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP\n-);\ndiff --git a/playground/prisma/migrations/20230208183556_/migration.sql 
b/playground/prisma/migrations/20230208183556_/migration.sql\ndeleted file mode 100644\nindex 619fd84..0000000\n--- a/playground/prisma/migrations/20230208183556_/migration.sql\n+++ /dev/null\n@@ -1,20 +0,0 @@\n-/*\n- Warnings:\n-\n- - You are about to drop the column `config` on the `Session` table. All the data in the column will be lost.\n-\n-*/\n--- RedefineTables\n-PRAGMA foreign_keys=OFF;\n-CREATE TABLE \"new_Session\" (\n- \"id\" TEXT NOT NULL PRIMARY KEY,\n- \"code\" TEXT NOT NULL,\n- \"theme\" TEXT NOT NULL DEFAULT '',\n- \"view\" TEXT NOT NULL DEFAULT 'code',\n- \"createdAt\" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP\n-);\n-INSERT INTO \"new_Session\" (\"code\", \"createdAt\", \"id\", \"view\") SELECT \"code\", \"createdAt\", \"id\", \"view\" FROM \"Session\";\n-DROP TABLE \"Session\";\n-ALTER TABLE \"new_Session\" RENAME TO \"Session\";\n-PRAGMA foreign_key_check;\n-PRAGMA foreign_keys=ON;\ndiff --git a/playground/prisma/migrations/20230529181831_init/migration.sql b/playground/prisma/migrations/20230529181831_init/migration.sql\nnew file mode 100644\nindex 0000000..ffe5546\n--- /dev/null\n+++ b/playground/prisma/migrations/20230529181831_init/migration.sql\n@@ -0,0 +1,9 @@\n+-- CreateTable\n+CREATE TABLE `Session` (\n+ `id` VARCHAR(191) NOT NULL,\n+ `code` TEXT NOT NULL,\n+ `theme` TEXT NOT NULL,\n+ `createdAt` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),\n+\n+ PRIMARY KEY (`id`)\n+) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;\ndiff --git a/playground/prisma/migrations/migration_lock.toml b/playground/prisma/migrations/migration_lock.toml\nindex e5e5c47..e5a788a 100644\n--- a/playground/prisma/migrations/migration_lock.toml\n+++ b/playground/prisma/migrations/migration_lock.toml\n@@ -1,3 +1,3 @@\n # Please do not edit this file manually\n # It should be added in your version-control system (i.e. 
Git)\n-provider = \"sqlite\"\n\\ No newline at end of file\n+provider = \"mysql\"\n\\ No newline at end of file\ndiff --git a/playground/prisma/schema.prisma b/playground/prisma/schema.prisma\nindex e84678a..9e1281e 100644\n--- a/playground/prisma/schema.prisma\n+++ b/playground/prisma/schema.prisma\n@@ -2,16 +2,14 @@ generator client {\n provider = \"prisma-client-js\"\n }\n \n-// Using SQLite for local development\n datasource db {\n- provider = \"sqlite\"\n- url = \"file:dev.db\"\n+ provider = \"mysql\"\n+ url = env(\"DATABASE_URL\")\n }\n \n model Session {\n- id String @id\n- code String\n- theme String @default(\"\")\n- view String @default(\"code\")\n+ id String @id @default(cuid())\n+ code String @db.Text\n+ theme String @db.Text\n createdAt DateTime @default(now())\n }\ndiff --git a/playground/src/app/[id]/page.tsx b/playground/src/app/[id]/page.tsx\nindex 40c21f0..a88d2b9 100644\n--- a/playground/src/app/[id]/page.tsx\n+++ b/playground/src/app/[id]/page.tsx\n@@ -6,9 +6,9 @@ const Page = async (props: any) => {\n params: { id },\n } = props\n \n- const initialState = await prisma?.session.findFirst({\n+ const initialState = await prisma.session.findFirst({\n where: { id },\n- select: { code: true, theme: true, view: true },\n+ select: { code: true, theme: true },\n })\n \n return <Playground intialState={initialState} />\ndiff --git a/playground/src/components/Editor.tsx b/playground/src/components/Editor.tsx\nindex 8263dba..e82469a 100644\n--- a/playground/src/components/Editor.tsx\n+++ b/playground/src/components/Editor.tsx\n@@ -123,10 +123,7 @@ export const Editor = (props: EditorProps) => {\n \n return (\n <Flex flex=\"1\" direction=\"column\" align=\"flex-start\">\n- <Tabs\n- defaultValue={value.view}\n- className={css({ flex: '1', width: 'full', display: 'flex', flexDirection: 'column' })}\n- >\n+ <Tabs defaultValue=\"code\" className={css({ flex: '1', width: 'full', display: 'flex', flexDirection: 'column' })}>\n <TabList\n className={css({\n px: '6',\ndiff --git a/playground/src/components/usePlayground.ts b/playground/src/components/usePlayground.ts\nindex 74b6069..a959fca 100644\n--- a/playground/src/components/usePlayground.ts\n+++ b/playground/src/components/usePlayground.ts\n@@ -4,7 +4,6 @@ import { Layout } from './LayoutControl'\n export type State = {\n code: string\n theme: string\n- view: string\n }\n \n export type UsePlayGroundProps = {\n@@ -51,7 +50,7 @@ export const App = () => {\n body: JSON.stringify(state),\n })\n .then((response) => response.json())\n- .then((data) => {\n+ .then(({ data }) => {\n history.pushState({ id: data.id }, '', data.id)\n setIsPristine(true)\n })\ndiff --git a/playground/src/pages/api/share.ts b/playground/src/pages/api/share.ts\nindex 23f8b9e..e6f3f26 100644\n--- a/playground/src/pages/api/share.ts\n+++ b/playground/src/pages/api/share.ts\n@@ -7,17 +7,16 @@ import { prisma } from '../../client/prisma'\n const schema = z.object({\n code: z.string(),\n theme: z.string(),\n- view: z.enum(['code', 'config']).optional(),\n })\n \n const handler = async (req: NextApiRequest, res: NextApiResponse) =>\n match(req)\n .with({ method: 'POST' }, async () => {\n try {\n- const { code, theme } = schema.parse(req.body)\n+ const data = schema.parse(req.body)\n const id = nanoid(10)\n- await prisma.session.create({ data: { id, code, theme } })\n- return res.status(200).json({ id })\n+ const session = await prisma.session.create({ data: { id, ...data }, select: { id: true } })\n+ return res.status(200).json({ success: true, data: session })\n } 
catch (e) {\n console.log(e)\n return res.status(500).json({ success: false })\n"]
5
["e11d55a4922978b89a2c50bf577124b09449e89c", "92e940efeee199b1e0bbbc3c9eea7f3dc8221619", "672cd2b9775fb6dac2d522cb3f4469db47c0556b", "ad52e1d67fd77f0b6a73fbf989b33f9abf395ecc", "9c2c7ea1d4935d30e014ca807a4f9cb1665b1e41"]
["ci", "build", "test", "refactor", "feat"]
use connect instead of begin to avoid nesting transactions,updates the readme to improve the readability and contributing sections,cancel in-progress dep update jobs when a new one arrives [skip ci],removing automatic page push on nav,updated riot to v6, fixed build
["diff --git a/ibis/backends/duckdb/__init__.py b/ibis/backends/duckdb/__init__.py\nindex 2006f59..bb2028e 100644\n--- a/ibis/backends/duckdb/__init__.py\n+++ b/ibis/backends/duckdb/__init__.py\n@@ -1180,7 +1180,7 @@ WHERE catalog_name = :database\"\"\"\n def _register_udfs(self, expr: ir.Expr) -> None:\n import ibis.expr.operations as ops\n \n- with self.begin() as con:\n+ with self.con.connect() as con:\n for udf_node in expr.op().find(ops.ScalarUDF):\n compile_func = getattr(\n self, f\"_compile_{udf_node.__input_type__.name.lower()}_udf\"\n", "diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md\nindex 3c4dd8d..f8b8514 100644\n--- a/.github/CONTRIBUTING.md\n+++ b/.github/CONTRIBUTING.md\n@@ -21,7 +21,8 @@ Contributions are always welcome! Please use the following guidelines when contr\n - `chore` - Catch all or things that have to do with the build system, etc\n - `examples` - Changes to existing example, or a new example\n * The `COMPONENT` is optional, and may be a single file, directory, or logical component. Can be omitted if commit applies globally\n-5. Run the tests (`cargo test --no-std-features && cargo test --features yaml`)\n+5. Run the tests (`cargo test --features \"yaml unstable\"`)\n+5. Run the lints (`cargo build --features lints`) (requires a nightly compiler)\n 6. `git rebase` into concise commits and remove `--fixup`s (`git rebase -i HEAD~NUM` where `NUM` is number of commits back)\n 7. Push your changes back to your fork (`git push origin $your-branch`)\n 8. Create a pull request! (You can also create the pull request first, and we'll merge when ready. This a good way to discuss proposed changes.)\ndiff --git a/README.md b/README.md\nindex 9e6efce..b74405d 100644\n--- a/README.md\n+++ b/README.md\n@@ -31,7 +31,9 @@ Table of Contents\n * [More Information](#more-information)\n * [Video Tutorials](#video-tutorials)\n * [How to Contribute](#how-to-contribute)\n- * [Running the tests](#running-the-tests)\n+ * [Testing Code](#testing-code)\n+ * [Linting Code](#linting-code)\n+ * [Debugging Code](#debugging-code)\n * [Goals](#goals)\n * [Compatibility Policy](#compatibility-policy)\n * [Minimum Version of Rust](#minimum-version-of-rust)\n@@ -43,288 +45,83 @@ Created by [gh-md-toc](https://github.com/ekalinin/github-markdown-toc)\n \n ## What's New\n \n-Here's what's new in v2.18.0\n+Here's the highlights from v2.0.0 to v2.18.0\n \n * **Completions:** Adds completion support for Microsoft PowerShell! (Thanks to @Arnavion)\n-\n-Here's what's new in v2.17.1\n-\n-* Fixes a bug where using low index multiples was propagated to subcommands\n-\n-Here's what's new in v2.17.0\n-\n * Allows specifying the second to last positional argument as `multiple(true)` (i.e. things such as `mv <files>... 
<target>`)\n * Adds an `App::get_name` and `App::get_bin_name`\n-\n-Here's what's new in v2.16.4\n-\n-* Fixes bug that caused panic on subcommands with aliases\n * Conflicting argument errors are now symetrical, meaning more consistent and better usage suggestions\n-* Fixes typo in example `13a_enum_values_automatic`\n-* Fixes failing yaml example (#715)\n-* Fixes the `debug` feature (#716)\n-\n-Here's the highlights for v2.16.3\n-\n-* Fixes a bug where the derived display order isn't propagated\n-* **yaml-example:** fixes some inconsistent args in the example\n-\n-Here's the highlights for v2.16.2\n-\n-* Fixes a bug where single quotes are not escaped\n-\n-Here's the highlights for v2.16.1\n-\n-* **Help Message:** fixes a regression bug where args with multiple(true) threw off alignment\n-\n-Here's the highlights for v2.16.0\n-\n * **Completions:** adds automatic ZSH completion script generation support! :tada: :tada:\n-\n-Here's a gif of them in action!\n-\n-![zsh-comppletions](http://i.imgur.com/rwlMbAv.gif)\n-\n-Here's the highlights for v2.15.0\n-\n * **AppSettings:** adds new setting `AppSettings::AllowNegativeNumbers` which functions like `AllowLeadingHyphen` except only allows undefined negative numbers to pass parsing.\n-* Improves some of the documentation of `AppSettings` by moving variants into roughly alphabetical order\n-\n-Here's the highlights for v2.14.1 (Huge thanks to all the contributors who put in a lot of work this cycle! Especially @tormol @nabijaczleweli and @wdv4758h)\n-\n * Stabilize `clap_app!` macro (i.e. no longer need to use `unstable` feature)\n-* Fixes a bug that made determining when to auto-wrap long help messages inconsistent\n-* Fixes fish completions for nested subcommands\n-* Improve documentation around features\n-* Reword docs for `ErrorKind` and `App::settings`\n-* Fix tests that fail when the `suggestions` feature is disabled\n-* Fix the `OsString`-using doc-tests\n-* Tag non-rust code blocks as such instead of ignoring them\n-* Improve some errors about subcommands\n-* Makes sure the doc-tests don't fail before \"missing file\" in YAML tests\n * Deprecate `App::with_defaults`\n-* Make lints not enable other nightly-requiring features\n-\n-Here's the highlights for v2.14.0\n-\n-* One can now alias arguments either visibly (whichc appears in the help text) or invisibly just like subcommands!\n+* One can now alias arguments either visibly (which appears in the help text) or invisibly just like subcommands!\n * The `from_usage` parser now correctly handles non-ascii names / options and help!\n-* Fixes a bug in the `require_delimiter` code which caused some incorrect parses\n-* Fixes various typos in the docs\n-* Various other small performance improvements and enhancements\n-\n-Here's the highlights for v2.13.0\n-\n * **Value Delimiters:** fixes the confusion around implicitly setting value delimiters. (The default is to *not* use a delimiter unless explicitly set)\n-* **Docs:** Updates README.md with new website information and updated video tutorials info\n-* **Docs:** Updates the docs about removing implicit `value_delimiter(true)`\n-* **Docs:** Adds better examples on using default values\n-\n-\n-Here's the highlights for v2.12.1\n-\n-* Fixes a regression-bug where the old `{n}` newline char stopped being replaced a properly re-aligned newline\n-\n-Here's the highlights for v2.12.0\n-\n * Changes the default value delimiter rules (i.e. 
the default is `use_delimiter(false)` *unless* a setting/method that implies multiple values was used) **[Bugfix that *may* \"break\" code]**\n * If code breaks, simply add `Arg::use_delimiter(true)` to the affected args\n-* Updates the docs for the `Arg::multiple` method WRT value delimiters and default settings\n * Adds ability to hide the possible values from the help text on a per argument basis, instead of command wide\n * Allows for limiting detected terminal width (i.e. wrap at `x` length, unless the terminal width is *smaller*)\n-* Removes some redundant `contains()` checks for minor performance improvements\n-* Fixes a bug where valid args aren't recognized with the `AppSettings::AllowLeadingHyphen` setting\n * `clap` now ignores hard newlines in help messages and properly re-aligns text, but still wraps if the term width is too small\n-* Makes some minor changes to when next line help is automatically used\n * Adds support for the setting `Arg::require_delimiter` from YAML\n-* Removes the verbage about using `'{n}'` to insert newlines in help text from the docs (the normal `\\n` can now be used)\n-* Documents `AppSetting::DisableVersion`\n-\n-Here's the highlights for v2.11.3\n-\n * `clap` no longer requires one to use `{n}` inside help text to insert a newline that is properly aligned. One can now use the normal `\\n`.\n * `clap` now ignores hard newlines in help messages and properly re-aligns text, but still wraps if the term width is too small\n-* Supports setting `Arg::require_delimiter` from YAML\n-\n-Here's the highlights for v2.11.2\n-\n-* Makes some minor changes to when next line help is automatically used for improved wrapping\n-\n-Here's the highlights for v2.11.1\n-\n-* Fixes an issue where settings weren't propogated down through grand-child subcommands\n * Errors can now have custom description\n * Uses `term_size` instead of home-grown solution on Windows\n-* Updates deps with some minor bug fixes\n-\n-\n-Here's the highlights for v2.11.0\n-\n * Adds the ability to wrap help text intelligently on Windows!\n-* Moves docs to [docs.rs!](https://docs.rs/clap/)\n-* Fixes some usage strings that contain both args in groups and ones that conflict with each other\n-* Uses standard conventions for bash completion files, namely `{bin}.bash-completion`\n+* Moves docs to [docs.rs!](https://docs.rs/clap/)!\n * Automatically moves help text to the next line and wraps when term width is determined to be too small, or help text is too long\n * Vastly improves *development* error messages when using YAML\n-* Adds `App::with_defaults` to automatically use `crate_authors!` and `crate_version!` macros\n-* Other minor improvements and bug fixes\n-\n-Here's the highlights for v2.10.4\n-\n-* Fixes a bug where help is wrapped incorrectly and causing a panic with some non-English characters\n-\n-Here's the highlights for v2.10.3\n-\n-* Fixes a bug with non-English characters in help text wrapping, where the character is stripped or causes a panic\n-* Fixes an issue with `strsim` which caused a panic in some scenarios\n * Adds a shorthand way to ignore help text wrapping and use source formatting (i.e. 
`App::set_term_width(0)`)\n-\n-Here's the highlights for v2.10.2\n-\n-* Fixes a critical bug where the help message is printed twice\n-\n-Here's the highlights for v2.10.1\n-\n * **Help Subcommand:** fixes misleading usage string when using multi-level subcommmands such as `myprog help subcmd1 subcmd2`\n * **YAML:** allows using lists or single values with certain arg declarations for increased ergonomics\n-\n-\n-Here's the highlights for v2.10.0\n-\n-\n * **Fish Shell Completions:** one can generate a basic fish completions script at compile time!\n-* **External SubCommands:** fixes a bug which now correctly preserves external subcommand name along with args to said command (Minor breaking change that breaks no known real world code)\n-* **YAML Documentation:** fixes example 17's incorrect reference to arg_groups instead of groups\n-\n-\n-Here's the highlights for v2.9.3\n-\n * Adds the ability to generate completions to an `io::Write` object\n * Adds an `App::unset_setting` and `App::unset_settings`\n-* Fixes bug where only first arg in list of `required_unless_one` is recognized\n-* Fixes a typo bug `SubcommandsRequired`->`SubcommandRequired`\n-\n-\n-Here's the highlights for v2.9.2\n-\n-\n-* fixes bug where --help and --version short weren't added to the completion list\n-* improves completions allowing multiple bins to have seperate completion files\n-\n-Here's the highlights for v2.9.0\n-\n * **Completions:** one can now [generate a bash completions](https://docs.rs/clap/2.9.0/clap/struct.App.html#method.gen_completions) script at compile time! These completions work with options using [possible values](https://docs.rs/clap/2.9.0/clap/struct.Arg.html#method.possible_values), [subcommand aliases](https://docs.rs/clap/2.9.0/clap/struct.App.html#method.aliases), and even multiple levels of subcommands\n-* Minor bug fixes when using `AppSettings::TrailingVarArg` and `AppSettings::AllowLeadingHyphen`\n-\n-Here's the highlights for v2.8.0\n-\n * **Arg:** adds new optional setting [`Arg::require_delimiter`](https://docs.rs/clap/2.8.0/clap/struct.Arg.html#method.require_delimiter) which requires val delimiter to parse multiple values\n * The terminal sizing portion has been factored out into a separate crate, [term_size](https://crates.io/crates/term_size)\n-* Minor bug fixes\n-\n-\n-Here's the highlights for v2.7.1\n-\n-* **Options:**\n- * options using multiple values and delimiters no longer parse additional values after a trailing space (i.e. `prog -o 1,2 file.txt` parses as `1,2` for `-o` and `file.txt` for a positional arg)\n- * using options using multiple values and with an `=` no longer parse args after the trailing space as values (i.e. `prog -o=1 file.txt` parses as `1` for `-o` and `file.txt` for a positional arg)\n-\n-Here's the highlights for v2.7.0\n-\n+* Options using multiple values and delimiters no longer parse additional values after a trailing space (i.e. `prog -o 1,2 file.txt` parses as `1,2` for `-o` and `file.txt` for a positional arg)\n+* Using options using multiple values and with an `=` no longer parse args after the trailing space as values (i.e. 
`prog -o=1 file.txt` parses as `1` for `-o` and `file.txt` for a positional arg)\n * **Usage Strings:** `[FLAGS]` and `[ARGS]` are no longer blindly added to usage strings, instead only when applicable\n * `arg_enum!`: allows using more than one meta item, or things like `#[repr(C)]` with `arg_enum!`s\n * `App::print_help`: now prints the same as would have been printed by `--help` or the like\n-* **Help Messages:**\n- * prevents invoking `<cmd> help help` and displaying incorrect help message\n- * subcommand help messages requested via `<cmd> help <sub>` now correctly match `<cmd> <sub> --help`\n-* **`ArgGroup`s:**\n- * one can now specify groups which require AT LEAST one of the args\n- * allows adding multiple ArgGroups per Arg\n- * **Documentation:** vastly improves `ArgGroup` docs by adding better examples\n-* **Documentation:** fixes a bunch of typos in the documentation\n-\n-Here's the highlights for v2.6.0\n-\n+* Prevents invoking `<cmd> help help` and displaying incorrect help message\n+* Subcommand help messages requested via `<cmd> help <sub>` now correctly match `<cmd> <sub> --help`\n+* One can now specify groups which require AT LEAST one of the args\n+* Allows adding multiple ArgGroups per Arg\n * **Global Settings:** One can now set an `AppSetting` which is propogated down through child subcommands\n * **Terminal Wrapping:** Allows wrapping at specified term width (Even on Windows!) (can now set an absolute width to \"smart\" wrap at)\n * **SubCommands/Aliases:** adds support for visible aliases for subcommands (i.e. aliases that are dipslayed in the help message)\n * **Subcommands/Aliases:** when viewing the help of an alias, it now display help of the aliased subcommand\n-* Improves the default usage string when only a single positional arg is present\n * Adds new setting to stop delimiting values with `--` or `AppSettings::TrailingVarArg`\n-* `App::before_help` and `App::after_help` now correctly wrap\n-* Fixes bug where positional args are printed out of order when using templates\n-* Fixes bug where one can't override the auto-generated version or help flags\n-* Fixes issue where `App::before_help` wasn't printed\n-* Fixes a failing windows build\n-* Fixes bug where new color settings couldn't be converted from strings\n-* Adds missing YAML methods for App and Arg\n-* Allows printing version to any io::Write object\n-* Removes extra newline from help and version output\n-\n-Here's what's new in v.2.5.2\n-\n-* Removes trailing newlines from help and version output\n-* Allows printing version to any io::Write object\n-* Inter-links all types and pages\n-* Makes all publicly available types viewable in docs\n-* Fixes bug where one can't override version or help flags\n-* Fixes bug where args are printed out of order when using templates\n-* Fixes issue where `App::before_help` wasn't printed properly\n-\n-Here's what's new in v.2.5.0\n-\n * Subcommands now support aliases - think of them as hidden subcommands that dispatch to said subcommand automatically\n-\n-Here's what's new in v2.4.3\n-\n-* Bug Fixes\n- * Usage strings get de-deuplicated when there are args which are also part ``ArgGroup`s`\n- * Fixed times when `ArgGroup`s are duplicated in usage strings\n-* Improvements\n- * Positional arguments which are part of a group are now formatted in a more readable way (fewer brackets)\n- * Positional arguments use the standard `<>` brackets to reduce confusion\n- * The default help string for the `help` subcommand has been shortened to fit in 80 columns\n-\n-Here's the 
highlights from v2.4.0\n-\n+* Fixed times when `ArgGroup`s are duplicated in usage strings\n * **Before Help:** adds support for displaying info before help message\n * **Required Unless:** adds support for allowing args that are required unless certain other args are present\n-* Bug fixes\n-\n-Here's the highlights from v2.3.0\n-\n * **New Help Template Engine!**: Now you have full control over the layout of your help message. Major thanks to @hgrecco\n * **Pull crate Authors from Cargo.toml**: One can now use the `crate_authors!` macro to automatically pull the crate authors from their Cargo.toml file\n * **Colored Help Messages**: Help messages can now be optionally colored (See the `AppSettings::ColoredHelp` setting). Screenshot below.\n-* A bunch of bug fixes\n-\n-Here's the highlights from v2.2.1\n-\n * **Help text auto wraps and aligns at for subcommands too!** - Long help strings of subcommands will now properly wrap and align to term width on Linux and OS X. This can be turned off as well.\n-* Bug fixes\n-\n-An example of the optional colored help:\n-\n-![screenshot](http://i.imgur.com/7fs2h5j.png)\n-\n-Here's the highlights from v2.2.0\n-\n * **Help text auto wraps and aligns at term width!** - Long help strings will now properly wrap and align to term width on Linux and OS X (and presumably Unix too). This can be turned off as well.\n * **Can customize the order of opts, flags, and subcommands in help messages** - Instead of using the default alphabetical order, you can now re-arrange the order of your args and subcommands in help message. This helps to emphasize more popular or important options.\n- * **Can auto-derive the order from declaration order** - Have a bunch of args or subcommmands to re-order? You can now just derive the order from the declaration order!\n+* **Can auto-derive the order from declaration order** - Have a bunch of args or subcommmands to re-order? You can now just derive the order from the declaration order!\n * **Help subcommand now accepts other subcommands as arguments!** - Similar to other CLI precedents, the `help` subcommand can now accept other subcommands as arguments to display their help message. i.e. `$ myprog help mysubcmd` (*Note* these can even be nested heavily such as `$ myprog help subcmd1 subcmd2 subcmd3` etc.)\n+* **Default Values**: Args can now specify default values\n+* **Next Line Help**: Args can have help strings on the line following the argument (useful for long arguments, or those with many values). This can be set command-wide or for individual args\n \n-* Other minor bug fixes\n+Here's a gif of them in action!\n+\n+![zsh-comppletions](http://i.imgur.com/rwlMbAv.gif)\n \n An example of the help text wrapping at term width:\n \n ![screenshot](http://i.imgur.com/PAJzJJG.png)\n \n-In v2.1.2\n+An example of the optional colored help:\n+\n+![screenshot](http://i.imgur.com/7fs2h5j.png)\n \n- * **Default Values**: Args can now specify default values\n- * **Next Line Help**: Args can have help strings on the line following the argument (useful for long arguments, or those with many values). This can be set command-wide or for individual args\n- * **Documentation Examples**: The examples in the documentation have been vastly improved\n \n For full details, see [CHANGELOG.md](https://github.com/kbknapp/clap-rs/blob/master/CHANGELOG.md)\n \n@@ -697,6 +494,7 @@ features = [ \"suggestions\", \"color\" ]\n #### Opt-in features\n \n * **\"yaml\"**: Enables building CLIs from YAML documents. 
(builds dependency `yaml-rust`)\n+* **\"unstable\"**: Enables unstable `clap` features that may change from release to release\n \n ### Dependencies Tree\n \n@@ -707,6 +505,7 @@ The following graphic depicts `clap`s dependency graph (generated using [cargo-g\n * **Blue** Color: Dev dependency, only used while developing.\n \n ![clap dependencies](clap_dep_graph.png)\n+\n ### More Information\n \n You can find complete documentation on the [docs.rs](https://docs.rs/clap/) for this project.\n@@ -727,20 +526,65 @@ Another really great way to help is if you find an interesting, or helpful way i\n \n Please read [CONTRIBUTING.md](.github/CONTRIBUTING.md) before you start contributing.\n \n+\n+### Testing Code\n+\n To test with all features both enabled and disabled, you can run theese commands:\n \n ```sh\n $ cargo test --no-default-features\n-$ cargo test --features yaml\n+$ cargo test --features \"yaml unstable\"\n ```\n \n-If you have a nightly compiler you can append `--features lints` to both commands\n-to get style warnings and code smells; If you get one from code you think is fine,\n-you can ignore it by prepending `#[cfg_attr(feature=\"lints\", allow(lint_name))]`\n-to the function or impl block.\n+Alternatively, if you have [`just`](https://github.com/casey/just) installed you can run the prebuilt recipies. *Not* using `just` is prfeclty fine as well, it simply bundles commands automatically.\n+\n+For example, to test the code, as above simply run:\n+\n+```sh\n+$ just run-tests`\n+```\n+\n+From here on, I will lis the appropriate `cargo` command as well as the `just` command.\n+\n+Sometimes it's helpful to only run a subset of the tests, which can be done via:\n+\n+```sh\n+$ cargo test --test <test_name>\n+\n+# Or\n+\n+$ just run-test <test_name>\n+```\n \n-If you are debugging (or just trying to understand the code) you can enable the\n-\"debug\" feature which will trace function calls and brances in some parts of the code.\n+### Linting Code\n+\n+During the CI process `clap` runs against many different lints using [`clippy`](https://github.com/Manishearth/rust-clippy). In order to check if these lints pass on your own computer prior to submitting a PR you'll need a nightly compiler.\n+\n+In order to check the code for lints run either:\n+\n+```sh\n+$ rustup override add nightly\n+$ cargo build --features lints\n+$ rustup override remove\n+\n+# Or\n+\n+$ just lint\n+```\n+\n+### Debugging Code\n+\n+Another helpful technique is to see the `clap` debug output while developing features. 
In order to see the debug output while running the full test suite or individual tests, run:\n+\n+```sh\n+$ cargo test --features debug\n+\n+# Or for individual tests\n+$ cargo test --test <test_name> --features debug\n+\n+# The corresponding just command for individual debugging tests is:\n+$ just debug <test_name>\n+```\n \n ### Goals\n \n", "diff --git a/.github/workflows/update-deps.yml b/.github/workflows/update-deps.yml\nindex 3a71e29..25f6f27 100644\n--- a/.github/workflows/update-deps.yml\n+++ b/.github/workflows/update-deps.yml\n@@ -4,6 +4,11 @@ on:\n # run every 24 hours at midnight\n - cron: \"0 */24 * * *\"\n workflow_dispatch:\n+\n+concurrency:\n+ group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}\n+ cancel-in-progress: true\n+\n jobs:\n generate_updates:\n runs-on: ubuntu-latest\n", "diff --git a/ionic/components/nav/test/basic/index.ts b/ionic/components/nav/test/basic/index.ts\nindex 4b1a8ea..2834f68 100644\n--- a/ionic/components/nav/test/basic/index.ts\n+++ b/ionic/components/nav/test/basic/index.ts\n@@ -63,12 +63,6 @@ class FirstPage {\n }\n }\n \n- onPageDidEnter() {\n- setTimeout(() => {\n- this.nav.push(PrimaryHeaderPage);\n- }, 1000);\n- }\n-\n setPages() {\n let items = [\n PrimaryHeaderPage\n", "diff --git a/components/riot/package.json b/components/riot/package.json\nindex c41743a..eb69756 100644\n--- a/components/riot/package.json\n+++ b/components/riot/package.json\n@@ -61,7 +61,7 @@\n },\n \"devDependencies\": {\n \"@babel/preset-typescript\": \"^7.14.5\",\n- \"@riotjs/cli\": \"^6.0.4\",\n+ \"@riotjs/cli\": \"^6.0.5\",\n \"@riotjs/compiler\": \"^6.0.0\",\n \"chai\": \"^4.3.4\",\n \"esm\": \"^3.2.25\",\n"]
5
["6889543bec720e7e7da66535e1012cb66edfe081", "eb51316cdfdc7258d287ba13b67ef2f42bd2b8f6", "c2300c94c6b7d1599387272b616e1d79e93723c7", "cd9e6a2ab17c5961b0f977bb8a06f8545da49a97", "5d256f937f93e5a5ed003df86d38c44834095a11"]
["fix", "docs", "ci", "test", "build"]
add descriptions to buttons on hover,only restart if pages directory itself is changed resolves #429,rework RaftCommittedEntryListener Iterate over RaftCommittedEntryListener and refactor the listener such that it serves the actual need. We have some services (to be specific the AsyncSnapshotDirector) which need the committed position, and want to listen to new updates. In raft we know which record we are committing and whether it was an application record, so we can pass this information through the listeners. This avoids passing in the whole IndexedRecord object, and reduces the potential of going OOM because of keeping too much data in heap (when commit is not possible).,update CI images from docker buster to bullseye This will break `perf_image` until the new CI image is built due to the newly required `--all-tags` parameter to `docker push` that isn't available for the docker version we run on buster.,improve test stability * improve test stability by waiting until the message subscription is opened. Message subscriptions are opened outside of the context of the stream processor. Sometimes this may take a while. * enable running the tests repeatedly by fixing the engine rule
["diff --git a/benchmarks/main.mjs b/benchmarks/main.mjs\nindex 0c2dc6b..e2f79d4 100644\n--- a/benchmarks/main.mjs\n+++ b/benchmarks/main.mjs\n@@ -65,8 +65,9 @@ const vnode = () =>\n },\n style: style({ margin: '5px' }),\n disabled,\n+ title: suite.name.split(' | ')[1],\n },\n- [suite.name],\n+ [suite.name.split(' | ')[0]],\n ),\n ),\n m(\ndiff --git a/benchmarks/suites/appendManyRowsToLargeTable.mjs b/benchmarks/suites/appendManyRowsToLargeTable.mjs\nindex e6a034e..7e34ca3 100644\n--- a/benchmarks/suites/appendManyRowsToLargeTable.mjs\n+++ b/benchmarks/suites/appendManyRowsToLargeTable.mjs\n@@ -31,7 +31,9 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('append many rows to large table');\n+const suite = new benchmark.Suite(\n+ 'append many rows to large table | appending 1,000 to a table of 10,000 rows.',\n+);\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/clearRows.mjs b/benchmarks/suites/clearRows.mjs\nindex ad47036..2a7711b 100644\n--- a/benchmarks/suites/clearRows.mjs\n+++ b/benchmarks/suites/clearRows.mjs\n@@ -27,7 +27,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(row);\n });\n \n-const suite = new benchmark.Suite('clear rows');\n+const suite = new benchmark.Suite('clear rows | clearing a table with 1,000 rows');\n \n const hoistedVNode = m('table', undefined, [], VFlags.NO_CHILDREN);\n \ndiff --git a/benchmarks/suites/createManyRows.mjs b/benchmarks/suites/createManyRows.mjs\nindex 578f511..96c7b02 100644\n--- a/benchmarks/suites/createManyRows.mjs\n+++ b/benchmarks/suites/createManyRows.mjs\n@@ -7,7 +7,7 @@ import benchmark from '../benchmark';\n import { m, patch } from '../../src/index';\n import { buildData } from '../data';\n \n-const suite = new benchmark.Suite('create many rows');\n+const suite = new benchmark.Suite('create many rows | creating 10,000 rows');\n \n const hoistedVNode = m(\n 'div',\ndiff --git a/benchmarks/suites/createRows.mjs b/benchmarks/suites/createRows.mjs\nindex bfcc876..4d9ff57 100644\n--- a/benchmarks/suites/createRows.mjs\n+++ b/benchmarks/suites/createRows.mjs\n@@ -7,7 +7,7 @@ import benchmark from '../benchmark';\n import { m, patch } from '../../src/index';\n import { buildData } from '../data';\n \n-const suite = new benchmark.Suite('create rows');\n+const suite = new benchmark.Suite('create rows | creating 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/partialUpdate.mjs b/benchmarks/suites/partialUpdate.mjs\nindex 55948a9..c5f1de3 100644\n--- a/benchmarks/suites/partialUpdate.mjs\n+++ b/benchmarks/suites/partialUpdate.mjs\n@@ -34,7 +34,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('partial update');\n+const suite = new benchmark.Suite('partial update | updating every 10th row for 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/removeRow.mjs b/benchmarks/suites/removeRow.mjs\nindex aeb1e9a..31c7599 100644\n--- a/benchmarks/suites/removeRow.mjs\n+++ b/benchmarks/suites/removeRow.mjs\n@@ -30,7 +30,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('remove row');\n+const suite = new benchmark.Suite('remove row | removing one row');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/replaceAllRows.mjs b/benchmarks/suites/replaceAllRows.mjs\nindex 9555ae4..7001667 100644\n--- a/benchmarks/suites/replaceAllRows.mjs\n+++ 
b/benchmarks/suites/replaceAllRows.mjs\n@@ -41,7 +41,7 @@ data2.forEach(({ id, label }) => {\n \n shuffleArray(data2);\n \n-const suite = new benchmark.Suite('replace all rows');\n+const suite = new benchmark.Suite('replace all rows | updating all 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/selectRow.mjs b/benchmarks/suites/selectRow.mjs\nindex 76be216..de69359 100644\n--- a/benchmarks/suites/selectRow.mjs\n+++ b/benchmarks/suites/selectRow.mjs\n@@ -30,7 +30,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('select row');\n+const suite = new benchmark.Suite('select row | highlighting a selected row');\n \n const hoistedVNode = m(\n 'table',\ndiff --git a/benchmarks/suites/swapRows.mjs b/benchmarks/suites/swapRows.mjs\nindex 2a91e74..ce52036 100644\n--- a/benchmarks/suites/swapRows.mjs\n+++ b/benchmarks/suites/swapRows.mjs\n@@ -36,7 +36,7 @@ data2.forEach(({ id, label }) => {\n el2.appendChild(tr);\n });\n \n-const suite = new benchmark.Suite('swap rows');\n+const suite = new benchmark.Suite('swap rows | swap 2 rows for table with 1,000 rows');\n \n const hoistedVNode = m(\n 'table',\n", "diff --git a/packages/cli/src/commands/dev.ts b/packages/cli/src/commands/dev.ts\nindex 35d859e..d6d91ed 100644\n--- a/packages/cli/src/commands/dev.ts\n+++ b/packages/cli/src/commands/dev.ts\n@@ -1,4 +1,4 @@\n-import { resolve } from 'upath'\n+import { resolve, relative } from 'upath'\n import chokidar from 'chokidar'\n import debounce from 'debounce-promise'\n import type { Nuxt } from '@nuxt/kit'\n@@ -27,9 +27,9 @@ export default defineNuxtCommand({\n const { loadNuxt, buildNuxt } = requireModule('@nuxt/kit', rootDir) as typeof import('@nuxt/kit')\n \n let currentNuxt: Nuxt\n- const load = async (isRestart: boolean) => {\n+ const load = async (isRestart: boolean, reason?: string) => {\n try {\n- const message = `${isRestart ? 'Restarting' : 'Starting'} nuxt...`\n+ const message = `${reason ? reason + '. ' : ''}${isRestart ? 
'Restarting' : 'Starting'} nuxt...`\n server.setApp(createLoadingHandler(message))\n if (isRestart) {\n console.log(message)\n@@ -59,12 +59,8 @@ export default defineNuxtCommand({\n const dLoad = debounce(load, 250)\n const watcher = chokidar.watch([rootDir], { ignoreInitial: true, depth: 1 })\n watcher.on('all', (_event, file) => {\n- // Ignore any changes to files within the Nuxt build directory\n- if (file.includes(currentNuxt.options.buildDir)) {\n- return\n- }\n- if (file.includes('nuxt.config') || file.includes('modules') || file.includes('pages')) {\n- dLoad(true)\n+ if (file.match(/nuxt\\.config\\.(js|ts|mjs|cjs)$|pages$/)) {\n+ dLoad(true, `${relative(rootDir, file)} updated`)\n }\n })\n \n", "diff --git a/atomix/cluster/src/main/java/io/atomix/raft/RaftApplicationEntryCommittedPositionListener.java b/atomix/cluster/src/main/java/io/atomix/raft/RaftApplicationEntryCommittedPositionListener.java\nnew file mode 100644\nindex 0000000..57c28a9\n--- /dev/null\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/RaftApplicationEntryCommittedPositionListener.java\n@@ -0,0 +1,31 @@\n+/*\n+ * Copyright 2016-present Open Networking Foundation\n+ * Copyright \u00a9 2020 camunda services GmbH ([email protected])\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.atomix.raft;\n+\n+/**\n+ * This listener will only be called by the Leader, when it commits an application entry.\n+ *\n+ * <p>If RAFT is currently running in a follower role, it will not call this listener.\n+ */\n+@FunctionalInterface\n+public interface RaftApplicationEntryCommittedPositionListener {\n+\n+ /**\n+ * @param committedPosition the new committed position which is related to the application entries\n+ */\n+ void onCommit(long committedPosition);\n+}\ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/RaftCommittedEntryListener.java b/atomix/cluster/src/main/java/io/atomix/raft/RaftCommittedEntryListener.java\ndeleted file mode 100644\nindex 3d11d75..0000000\n--- a/atomix/cluster/src/main/java/io/atomix/raft/RaftCommittedEntryListener.java\n+++ /dev/null\n@@ -1,32 +0,0 @@\n-/*\n- * Copyright 2016-present Open Networking Foundation\n- * Copyright \u00a9 2020 camunda services GmbH ([email protected])\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.atomix.raft;\n-\n-import io.atomix.raft.storage.log.IndexedRaftLogEntry;\n-\n-/**\n- * This listener will only be called by the Leader, when it commits an entry. 
If RAFT is currently\n- * running in a follower role, it will not call this listener.\n- */\n-@FunctionalInterface\n-public interface RaftCommittedEntryListener {\n-\n- /**\n- * @param indexedRaftLogEntry the new committed entry\n- */\n- void onCommit(IndexedRaftLogEntry indexedRaftLogEntry);\n-}\ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java b/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java\nindex 1f4ee98..c177cb1 100644\n--- a/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/impl/RaftContext.java\n@@ -27,8 +27,8 @@ import io.atomix.cluster.MemberId;\n import io.atomix.cluster.messaging.MessagingException.NoRemoteHandler;\n import io.atomix.cluster.messaging.MessagingException.NoSuchMemberException;\n import io.atomix.raft.ElectionTimer;\n+import io.atomix.raft.RaftApplicationEntryCommittedPositionListener;\n import io.atomix.raft.RaftCommitListener;\n-import io.atomix.raft.RaftCommittedEntryListener;\n import io.atomix.raft.RaftError;\n import io.atomix.raft.RaftException.ProtocolException;\n import io.atomix.raft.RaftRoleChangeListener;\n@@ -61,7 +61,6 @@ import io.atomix.raft.roles.PromotableRole;\n import io.atomix.raft.roles.RaftRole;\n import io.atomix.raft.storage.RaftStorage;\n import io.atomix.raft.storage.StorageException;\n-import io.atomix.raft.storage.log.IndexedRaftLogEntry;\n import io.atomix.raft.storage.log.RaftLog;\n import io.atomix.raft.storage.system.MetaStore;\n import io.atomix.raft.utils.StateUtil;\n@@ -115,7 +114,7 @@ public class RaftContext implements AutoCloseable, HealthMonitorable {\n private final Set<Consumer<State>> stateChangeListeners = new CopyOnWriteArraySet<>();\n private final Set<Consumer<RaftMember>> electionListeners = new CopyOnWriteArraySet<>();\n private final Set<RaftCommitListener> commitListeners = new CopyOnWriteArraySet<>();\n- private final Set<RaftCommittedEntryListener> committedEntryListeners =\n+ private final Set<RaftApplicationEntryCommittedPositionListener> committedEntryListeners =\n new CopyOnWriteArraySet<>();\n private final Set<SnapshotReplicationListener> snapshotReplicationListeners =\n new CopyOnWriteArraySet<>();\n@@ -433,21 +432,23 @@ public class RaftContext implements AutoCloseable, HealthMonitorable {\n * <p>Note that it will be called on the Raft thread, and as such should not perform any heavy\n * computation.\n *\n- * @param raftCommittedEntryListener the listener to add\n+ * @param raftApplicationEntryCommittedPositionListener the listener to add\n */\n public void addCommittedEntryListener(\n- final RaftCommittedEntryListener raftCommittedEntryListener) {\n- committedEntryListeners.add(raftCommittedEntryListener);\n+ final RaftApplicationEntryCommittedPositionListener\n+ raftApplicationEntryCommittedPositionListener) {\n+ committedEntryListeners.add(raftApplicationEntryCommittedPositionListener);\n }\n \n /**\n * Removes registered committedEntryListener\n *\n- * @param raftCommittedEntryListener the listener to remove\n+ * @param raftApplicationEntryCommittedPositionListener the listener to remove\n */\n public void removeCommittedEntryListener(\n- final RaftCommittedEntryListener raftCommittedEntryListener) {\n- committedEntryListeners.remove(raftCommittedEntryListener);\n+ final RaftApplicationEntryCommittedPositionListener\n+ raftApplicationEntryCommittedPositionListener) {\n+ committedEntryListeners.remove(raftApplicationEntryCommittedPositionListener);\n }\n \n /**\n@@ -464,7 +465,7 @@ 
public class RaftContext implements AutoCloseable, HealthMonitorable {\n *\n * @param committedEntry the most recently committed entry\n */\n- public void notifyCommittedEntryListeners(final IndexedRaftLogEntry committedEntry) {\n+ public void notifyApplicationEntryCommittedPositionListeners(final long committedEntry) {\n committedEntryListeners.forEach(listener -> listener.onCommit(committedEntry));\n }\n \ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java b/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java\nindex 56c7172..d075fca 100644\n--- a/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/partition/impl/RaftPartitionServer.java\n@@ -21,8 +21,8 @@ import io.atomix.cluster.MemberId;\n import io.atomix.cluster.messaging.ClusterCommunicationService;\n import io.atomix.primitive.partition.Partition;\n import io.atomix.primitive.partition.PartitionMetadata;\n+import io.atomix.raft.RaftApplicationEntryCommittedPositionListener;\n import io.atomix.raft.RaftCommitListener;\n-import io.atomix.raft.RaftCommittedEntryListener;\n import io.atomix.raft.RaftRoleChangeListener;\n import io.atomix.raft.RaftServer;\n import io.atomix.raft.RaftServer.Role;\n@@ -205,16 +205,20 @@ public class RaftPartitionServer implements HealthMonitorable {\n }\n \n /**\n- * @see io.atomix.raft.impl.RaftContext#addCommittedEntryListener(RaftCommittedEntryListener)\n+ * @see\n+ * io.atomix.raft.impl.RaftContext#addCommittedEntryListener(RaftApplicationEntryCommittedPositionListener)\n */\n- public void addCommittedEntryListener(final RaftCommittedEntryListener commitListener) {\n+ public void addCommittedEntryListener(\n+ final RaftApplicationEntryCommittedPositionListener commitListener) {\n server.getContext().addCommittedEntryListener(commitListener);\n }\n \n /**\n- * @see io.atomix.raft.impl.RaftContext#removeCommittedEntryListener(RaftCommittedEntryListener)\n+ * @see\n+ * io.atomix.raft.impl.RaftContext#removeCommittedEntryListener(RaftApplicationEntryCommittedPositionListener)\n */\n- public void removeCommittedEntryListener(final RaftCommittedEntryListener commitListener) {\n+ public void removeCommittedEntryListener(\n+ final RaftApplicationEntryCommittedPositionListener commitListener) {\n server.getContext().removeCommittedEntryListener(commitListener);\n }\n \ndiff --git a/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java b/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java\nindex e54df1a..fcfd177 100644\n--- a/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java\n+++ b/atomix/cluster/src/main/java/io/atomix/raft/roles/LeaderRole.java\n@@ -630,27 +630,47 @@ public final class LeaderRole extends ActiveRole implements ZeebeLogAppender {\n \n private void replicate(final IndexedRaftLogEntry indexed, final AppendListener appendListener) {\n raft.checkThread();\n- appender\n- .appendEntries(indexed.index())\n- .whenCompleteAsync(\n- (commitIndex, commitError) -> {\n- if (!isRunning()) {\n- return;\n- }\n+ final var appendEntriesFuture = appender.appendEntries(indexed.index());\n+\n+ final boolean applicationEntryWasCommitted = indexed.isApplicationEntry();\n+ if (applicationEntryWasCommitted) {\n+ // We have some services which are waiting for the application records, especially position\n+ // to be committed. 
This is our glue code to notify them, instead of\n+ // passing the complete object (IndexedRaftLogEntry) threw the listeners and\n+ // keep them in heap until they are committed. This had the risk of going out of OOM\n+ // if records can't be committed, see https://github.com/camunda/zeebe/issues/14275\n+ final var committedPosition = indexed.getApplicationEntry().highestPosition();\n+ appendEntriesFuture.whenCompleteAsync(\n+ (commitIndex, commitError) -> {\n+ if (!isRunning()) {\n+ return;\n+ }\n+\n+ if (commitError == null) {\n+ raft.notifyApplicationEntryCommittedPositionListeners(committedPosition);\n+ }\n+ },\n+ raft.getThreadContext());\n+ }\n \n- // have the state machine apply the index which should do nothing but ensures it keeps\n- // up to date with the latest entries, so it can handle configuration and initial\n- // entries properly on fail over\n- if (commitError == null) {\n- appendListener.onCommit(indexed.index());\n- raft.notifyCommittedEntryListeners(indexed);\n- } else {\n- appendListener.onCommitError(indexed.index(), commitError);\n- // replicating the entry will be retried on the next append request\n- log.error(\"Failed to replicate entry: {}\", indexed, commitError);\n- }\n- },\n- raft.getThreadContext());\n+ appendEntriesFuture.whenCompleteAsync(\n+ (commitIndex, commitError) -> {\n+ if (!isRunning()) {\n+ return;\n+ }\n+\n+ // have the state machine apply the index which should do nothing but ensures it keeps\n+ // up to date with the latest entries, so it can handle configuration and initial\n+ // entries properly on fail over\n+ if (commitError == null) {\n+ appendListener.onCommit(indexed.index());\n+ } else {\n+ appendListener.onCommitError(indexed.index(), commitError);\n+ // replicating the entry will be retried on the next append request\n+ log.error(\"Failed to replicate entry: {}\", indexed, commitError);\n+ }\n+ },\n+ raft.getThreadContext());\n }\n \n public synchronized void onInitialEntriesCommitted(final Runnable runnable) {\ndiff --git a/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java b/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java\nindex b217586..8029766 100644\n--- a/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java\n+++ b/atomix/cluster/src/test/java/io/atomix/raft/RaftAppendTest.java\n@@ -82,7 +82,7 @@ public class RaftAppendTest {\n @Test\n public void shouldNotifyCommittedEntryListenerOnLeaderOnly() throws Throwable {\n // given\n- final var committedEntryListener = mock(RaftCommittedEntryListener.class);\n+ final var committedEntryListener = mock(RaftApplicationEntryCommittedPositionListener.class);\n raftRule.addCommittedEntryListener(committedEntryListener);\n \n // when\ndiff --git a/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java b/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java\nindex 8f73cba..193a176 100644\n--- a/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java\n+++ b/atomix/cluster/src/test/java/io/atomix/raft/RaftRule.java\n@@ -644,9 +644,12 @@ public final class RaftRule extends ExternalResource {\n }\n \n public void addCommittedEntryListener(\n- final RaftCommittedEntryListener raftCommittedEntryListener) {\n+ final RaftApplicationEntryCommittedPositionListener\n+ raftApplicationEntryCommittedPositionListener) {\n servers.forEach(\n- (id, raft) -> raft.getContext().addCommittedEntryListener(raftCommittedEntryListener));\n+ (id, raft) ->\n+ raft.getContext()\n+ .addCommittedEntryListener(raftApplicationEntryCommittedPositionListener));\n }\n \n public 
void partition(final RaftServer follower) {\ndiff --git a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\nindex a61571f..6c082d7 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/AsyncSnapshotDirector.java\n@@ -7,8 +7,7 @@\n */\n package io.camunda.zeebe.broker.system.partitions.impl;\n \n-import io.atomix.raft.RaftCommittedEntryListener;\n-import io.atomix.raft.storage.log.IndexedRaftLogEntry;\n+import io.atomix.raft.RaftApplicationEntryCommittedPositionListener;\n import io.camunda.zeebe.broker.system.partitions.NoEntryAtSnapshotPosition;\n import io.camunda.zeebe.broker.system.partitions.StateController;\n import io.camunda.zeebe.logstreams.impl.Loggers;\n@@ -36,7 +35,7 @@ import java.util.function.Consumer;\n import org.slf4j.Logger;\n \n public final class AsyncSnapshotDirector extends Actor\n- implements RaftCommittedEntryListener, HealthMonitorable {\n+ implements RaftApplicationEntryCommittedPositionListener, HealthMonitorable {\n \n public static final Duration MINIMUM_SNAPSHOT_PERIOD = Duration.ofMinutes(1);\n \n@@ -115,7 +114,7 @@ public final class AsyncSnapshotDirector extends Actor\n @Override\n protected void handleFailure(final Throwable failure) {\n LOG.error(\n- \"No snapshot was taken due to failure in '{}'. Will try to take snapshot after snapshot period {}. {}\",\n+ \"No snapshot was taken due to failure in '{}'. Will try to take snapshot after snapshot period {}.\",\n actorName,\n snapshotRate,\n failure);\n@@ -407,13 +406,8 @@ public final class AsyncSnapshotDirector extends Actor\n }\n \n @Override\n- public void onCommit(final IndexedRaftLogEntry indexedRaftLogEntry) {\n- // is called by the Leader Role and gives the last committed entry, where we\n- // can extract the highest position, which corresponds to the last committed position\n- if (indexedRaftLogEntry.isApplicationEntry()) {\n- final var committedPosition = indexedRaftLogEntry.getApplicationEntry().highestPosition();\n- newPositionCommitted(committedPosition);\n- }\n+ public void onCommit(final long committedPosition) {\n+ newPositionCommitted(committedPosition);\n }\n \n public void newPositionCommitted(final long currentCommitPosition) {\n", "diff --git a/.circleci/config.yml b/.circleci/config.yml\nindex f8a53ba..c378c7e 100644\n--- a/.circleci/config.yml\n+++ b/.circleci/config.yml\n@@ -336,7 +336,7 @@ jobs:\n # Disabling for now, and tracked further investigations\n # in https://github.com/influxdata/k8s-idpe/issues/3038\n docker_layer_caching: false\n- version: 19.03.14\n+ version: 20.10.7\n - run: |\n sudo apt-get update\n sudo apt-get install -y docker.io\n@@ -355,7 +355,7 @@ jobs:\n BRANCH=$(git rev-parse --abbrev-ref HEAD | tr '/' '.')\n COMMIT_SHA=$(git rev-parse --short HEAD)\n docker build -t quay.io/influxdb/iox:$COMMIT_SHA -t quay.io/influxdb/iox:main -f docker/Dockerfile.iox .\n- docker push quay.io/influxdb/iox\n+ docker push --all-tags quay.io/influxdb/iox\n echo \"export COMMIT_SHA=${COMMIT_SHA}\" >> $BASH_ENV\n - run:\n name: Deploy tags\ndiff --git a/Dockerfile b/Dockerfile\nindex 8c23ea2..1df1fd2 100644\n--- a/Dockerfile\n+++ b/Dockerfile\n@@ -17,7 +17,7 @@ RUN \\\n cp /influxdb_iox/target/release/influxdb_iox /root/influxdb_iox && \\\n du -cshx /usr/local/cargo/registry /usr/local/cargo/git 
/influxdb_iox/target\n \n-FROM debian:buster-slim\n+FROM debian:bullseye-slim\n \n RUN apt-get update \\\n && apt-get install -y libssl1.1 libgcc1 libc6 ca-certificates --no-install-recommends \\\ndiff --git a/docker/Dockerfile.ci b/docker/Dockerfile.ci\nindex db0a8ca..cf9cd15 100644\n--- a/docker/Dockerfile.ci\n+++ b/docker/Dockerfile.ci\n@@ -12,7 +12,7 @@\n \n ARG RUST_VERSION\n # Build actual image used for CI pipeline\n-FROM rust:${RUST_VERSION}-slim-buster\n+FROM rust:${RUST_VERSION}-slim-bullseye\n \n # When https://github.com/rust-lang/rustup/issues/2686 is fixed, run the command added that\n # will install everything in rust-toolchain.toml here so that components are in the container\n@@ -42,7 +42,7 @@ COPY docker/redpanda.gpg /tmp/redpanda.gpg\n # Generated from https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/cfg/setup/bash.deb.sh\n RUN apt-key add /tmp/redpanda.gpg \\\n && rm /tmp/redpanda.gpg \\\n- && curl ${CURL_FLAGS} \"https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/config.deb.txt?distro=debian&codename=buster&version=10&arch=x86_64\" \\\n+ && curl ${CURL_FLAGS} \"https://packages.vectorized.io/nzc4ZYQK3WRGd9sy/redpanda/config.deb.txt?distro=debian&codename=bullseye&version=10&arch=x86_64\" \\\n > /etc/apt/sources.list.d/vectorized-redpanda.list \\\n && apt-get update \\\n && apt-get install -y redpanda \\\ndiff --git a/docker/Dockerfile.iox b/docker/Dockerfile.iox\nindex 42414db..ae1f38e 100644\n--- a/docker/Dockerfile.iox\n+++ b/docker/Dockerfile.iox\n@@ -1,7 +1,7 @@\n ###\n # Dockerfile used for deploying IOx\n ##\n-FROM debian:buster-slim\n+FROM debian:bullseye-slim\n \n RUN apt-get update \\\n && apt-get install -y libssl1.1 libgcc1 libc6 ca-certificates gettext-base --no-install-recommends \\\n", "diff --git a/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java b/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\nindex 0c539b9..ffaead1 100644\n--- a/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\n@@ -334,22 +334,31 @@ public class InterruptingEventSubprocessTest {\n \"timer-event-subprocess\",\n s -> s.startEvent(\"other-timer\").timerWithDuration(\"P1D\").endEvent());\n \n- final long wfInstanceKey = createInstanceAndTriggerEvent(workflow(eventSubprocess));\n+ final long wfInstanceKey = createInstanceAndWaitForTask(workflow(eventSubprocess));\n+\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .withWorkflowInstanceKey(wfInstanceKey)\n+ .withMessageName(\"other-message\")\n+ .await();\n+\n+ triggerEventSubprocess.accept(wfInstanceKey);\n \n // then\n assertThat(\n- RecordingExporter.messageSubscriptionRecords()\n+ RecordingExporter.records()\n+ .limitToWorkflowInstance(wfInstanceKey)\n+ .messageSubscriptionRecords()\n .withWorkflowInstanceKey(wfInstanceKey)\n- .withMessageName(\"other-message\")\n- .limit(4))\n+ .withMessageName(\"other-message\"))\n .extracting(Record::getIntent)\n .contains(MessageSubscriptionIntent.CLOSED);\n \n assertThat(\n- RecordingExporter.timerRecords()\n+ RecordingExporter.records()\n+ .limitToWorkflowInstance(wfInstanceKey)\n+ .timerRecords()\n .withWorkflowInstanceKey(wfInstanceKey)\n- .withHandlerNodeId(\"other-timer\")\n- .limit(4))\n+ .withHandlerNodeId(\"other-timer\"))\n .extracting(Record::getIntent)\n 
.contains(TimerIntent.CANCELED);\n }\ndiff --git a/engine/src/test/java/io/zeebe/engine/util/EngineRule.java b/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\nindex 8576be5..50040f4 100644\n--- a/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\n+++ b/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\n@@ -71,7 +71,7 @@ public final class EngineRule extends ExternalResource {\n \n private static final int PARTITION_ID = Protocol.DEPLOYMENT_PARTITION;\n private static final RecordingExporter RECORDING_EXPORTER = new RecordingExporter();\n- private StreamProcessorRule environmentRule;\n+ private final StreamProcessorRule environmentRule;\n private final RecordingExporterTestWatcher recordingExporterTestWatcher =\n new RecordingExporterTestWatcher();\n private final int partitionCount;\n@@ -80,7 +80,7 @@ public final class EngineRule extends ExternalResource {\n \n private final Int2ObjectHashMap<SubscriptionCommandMessageHandler> subscriptionHandlers =\n new Int2ObjectHashMap<>();\n- private final ExecutorService subscriptionHandlerExecutor = Executors.newSingleThreadExecutor();\n+ private ExecutorService subscriptionHandlerExecutor;\n \n private EngineRule(final int partitionCount) {\n this(partitionCount, false);\n@@ -115,6 +115,8 @@ public final class EngineRule extends ExternalResource {\n \n @Override\n protected void before() {\n+ subscriptionHandlerExecutor = Executors.newSingleThreadExecutor();\n+\n if (!explicitStart) {\n startProcessors();\n }\n@@ -123,7 +125,6 @@ public final class EngineRule extends ExternalResource {\n @Override\n protected void after() {\n subscriptionHandlerExecutor.shutdown();\n- environmentRule = null;\n subscriptionHandlers.clear();\n }\n \ndiff --git a/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java b/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\nindex 0f3da21..af6c50e 100755\n--- a/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\n+++ b/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\n@@ -248,6 +248,7 @@ public final class StreamProcessorRule implements TestRule {\n @Override\n protected void after() {\n streams = null;\n+ streamProcessingComposite = null;\n }\n }\n \ndiff --git a/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java b/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\nindex 293df93..a3ede18 100644\n--- a/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\n+++ b/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\n@@ -81,4 +81,9 @@ public final class RecordStream extends ExporterRecordStream<RecordValue, Record\n return new IncidentRecordStream(\n filter(r -> r.getValueType() == ValueType.INCIDENT).map(Record.class::cast));\n }\n+\n+ public MessageSubscriptionRecordStream messageSubscriptionRecords() {\n+ return new MessageSubscriptionRecordStream(\n+ filter(r -> r.getValueType() == ValueType.MESSAGE_SUBSCRIPTION).map(Record.class::cast));\n+ }\n }\n"]
5
["d8d0ba8ea17ed43a04f90213851d2f27056d8cf0", "cbce777addb3dd118232a9f28db9d425d4c937b2", "323cf81961cdd3748a7ba6ba470ecb13e5374e9f", "640cd88df3069a97d8244398414338dd317c5470", "00be00f2dd0ba7e4bfa4f5dfb74135761f5f86ec"]
["feat", "fix", "refactor", "ci", "test"]
serialize access to StreamObserver (a dependency-free sketch of this serialization pattern follows this record),fix `memtable` docstrings,add tests for ProfilePage methods,switch to callback ref,fix deploy
["diff --git a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\nindex ae2b1c0..8ed64e5 100644\n--- a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\n+++ b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\n@@ -22,6 +22,7 @@ import io.camunda.zeebe.transport.stream.api.ClientStreamer;\n import io.camunda.zeebe.util.VisibleForTesting;\n import io.grpc.Status;\n import io.grpc.StatusRuntimeException;\n+import io.grpc.internal.SerializingExecutor;\n import io.grpc.stub.ServerCallStreamObserver;\n import io.grpc.stub.StreamObserver;\n import java.util.concurrent.CompletableFuture;\n@@ -83,12 +84,12 @@ public class ClientStreamAdapter {\n @VisibleForTesting(\"Allow unit testing behavior job handling behavior\")\n static final class ClientStreamConsumerImpl implements ClientStreamConsumer {\n private final StreamObserver<ActivatedJob> responseObserver;\n- private final Executor executor;\n+ private final SerializingExecutor executor;\n \n public ClientStreamConsumerImpl(\n final StreamObserver<ActivatedJob> responseObserver, final Executor executor) {\n this.responseObserver = responseObserver;\n- this.executor = executor;\n+ this.executor = new SerializingExecutor(executor);\n }\n \n @Override\n", "diff --git a/ibis/expr/api.py b/ibis/expr/api.py\nindex 93fabaa..66a2ea9 100644\n--- a/ibis/expr/api.py\n+++ b/ibis/expr/api.py\n@@ -403,15 +403,21 @@ def memtable(\n >>> import ibis\n >>> t = ibis.memtable([{\"a\": 1}, {\"a\": 2}])\n >>> t\n+ PandasInMemoryTable\n+ data:\n+ DataFrameProxy:\n+ a\n+ 0 1\n+ 1 2\n \n >>> t = ibis.memtable([{\"a\": 1, \"b\": \"foo\"}, {\"a\": 2, \"b\": \"baz\"}])\n >>> t\n PandasInMemoryTable\n data:\n- ((1, 'foo'), (2, 'baz'))\n- schema:\n- a int8\n- b string\n+ DataFrameProxy:\n+ a b\n+ 0 1 foo\n+ 1 2 baz\n \n Create a table literal without column names embedded in the data and pass\n `columns`\n@@ -420,10 +426,22 @@ def memtable(\n >>> t\n PandasInMemoryTable\n data:\n- ((1, 'foo'), (2, 'baz'))\n- schema:\n- a int8\n- b string\n+ DataFrameProxy:\n+ a b\n+ 0 1 foo\n+ 1 2 baz\n+\n+ Create a table literal without column names embedded in the data. 
Ibis\n+ generates column names if none are provided.\n+\n+ >>> t = ibis.memtable([(1, \"foo\"), (2, \"baz\")])\n+ >>> t\n+ PandasInMemoryTable\n+ data:\n+ DataFrameProxy:\n+ col0 col1\n+ 0 1 foo\n+ 1 2 baz\n \"\"\"\n if columns is not None and schema is not None:\n raise NotImplementedError(\n", "diff --git a/client/src/components/Profile/PreScreeningIviewCard.tsx b/client/src/components/Profile/PreScreeningIviewCard.tsx\nindex f84392a..2031203 100644\n--- a/client/src/components/Profile/PreScreeningIviewCard.tsx\n+++ b/client/src/components/Profile/PreScreeningIviewCard.tsx\n@@ -27,7 +27,7 @@ type State = {\n isPreScreeningIviewModalVisible: boolean;\n };\n \n-class CoreJSIviewsCard extends React.PureComponent<Props, State> {\n+class PreScreeningIviewsCard extends React.PureComponent<Props, State> {\n state = {\n courseIndex: 0,\n isPreScreeningIviewModalVisible: false,\n@@ -98,4 +98,4 @@ class CoreJSIviewsCard extends React.PureComponent<Props, State> {\n }\n }\n \n-export default CoreJSIviewsCard;\n+export default PreScreeningIviewsCard;\ndiff --git a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\nindex 7b73c3f..54b378c 100644\n--- a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\n+++ b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\n@@ -1,7 +1,7 @@\n // Jest Snapshot v1, https://goo.gl/fbAQLP\n \n exports[`PreScreeningIviewCard Should render correctly 1`] = `\n-<CoreJSIviewsCard\n+<PreScreeningIviewsCard\n data={\n Array [\n Object {\n@@ -3015,5 +3015,5 @@ exports[`PreScreeningIviewCard Should render correctly 1`] = `\n </div>\n </Card>\n </CommonCard>\n-</CoreJSIviewsCard>\n+</PreScreeningIviewsCard>\n `;\ndiff --git a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\nindex 079d966..95f3e49 100644\n--- a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\n+++ b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\n@@ -4,7 +4,6 @@ import { shallowToJson } from 'enzyme-to-json';\n import { NextRouter } from 'next/router';\n import { Session } from 'components/withSession';\n import { ProfilePage } from '../index';\n-// import { GeneralInfo } from '../../../../../common/models/profile';\n \n jest.mock('next/config', () => () => ({}));\n jest.mock('services/user', () => ({\n@@ -12,80 +11,378 @@ jest.mock('services/user', () => ({\n getProfileInfo() {\n return jest.fn();\n }\n+ saveProfileInfo() {\n+ return jest.fn();\n+ }\n },\n }),\n );\n \n-describe('ProfilePage', () => {\n- const profile = {\n- generalInfo: {\n- name: 'Dzmitry Petrov',\n- githubId: 'petrov',\n- aboutMyself: 'Test',\n+const profile = {\n+ permissionsSettings: {\n+ isProfileVisible: { all: true },\n+ isAboutVisible: { mentor: true, student: false, all: false },\n+ isEducationVisible: { mentor: true, student: false, all: false },\n+ isEnglishVisible: { student: false, all: false },\n+ isEmailVisible: { student: false, all: false },\n+ isTelegramVisible: { student: false, all: false },\n+ isSkypeVisible: { student: false, all: false },\n+ isPhoneVisible: { student: false, all: false },\n+ isContactsNotesVisible: { student: true, all: false },\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n+ isMentorStatsVisible: { mentor: true, student: 
true, all: false },\n+ isStudentStatsVisible: { student: false, all: false },\n+ },\n+ generalInfo: {\n+ aboutMyself: 'Test',\n+ educationHistory: [{\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ }],\n+ englishLevel: 'a2+',\n+ locationId: 456,\n+ locationName: 'Brest',\n+ },\n+ contacts: {},\n+ mentorStats: [\n+ {},\n+ ],\n+ studentStats: [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n locationName: 'Minsk',\n- locationId: '1',\n- educationHistory: null,\n- englishLevel: 'a2+',\n- },\n- permissionsSettings: {\n- isProfileVisible: { all: true },\n- isAboutVisible: { mentor: true, student: false, all: false },\n- isEducationVisible: { mentor: true, student: false, all: false },\n- isEnglishVisible: { student: false, all: false },\n- isEmailVisible: { student: false, all: false },\n- isTelegramVisible: { student: false, all: false },\n- isSkypeVisible: { student: false, all: false },\n- isPhoneVisible: { student: false, all: false },\n- isContactsNotesVisible: { student: true, all: false },\n- isLinkedInVisible: { mentor: true, student: false, all: false },\n- isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n- isMentorStatsVisible: { mentor: true, student: true, all: false },\n- isStudentStatsVisible: { student: false, all: false },\n- },\n- contacts: {\n- phone: '+375292123456',\n- email: '[email protected]',\n- skype: 'petro:live',\n- telegram: 'petro',\n- notes: 'discord: @petro, instagram: @petro12',\n- },\n- isPermissionsSettingsChanged: true,\n- isProfileSettingsChanged: true,\n- };\n- const session = {\n- id: 2020,\n- githubId: 'mikhama',\n- isAdmin: true,\n- isHirer: false,\n- isActivist: false,\n- roles: {\n- 1: 'mentor',\n- 2: 'student',\n- 11: 'mentor',\n- },\n- coursesRoles: {\n- 13: [\n- 'manager',\n+ tasks: [\n+ {\n+ interviewFormAnswers: {},\n+ },\n ],\n },\n- } as Session;\n- const router = {\n- query: {\n- githubId: 'petrov',\n- },\n- asPath: '/#edit/',\n- } as unknown as NextRouter;\n+ ],\n+ publicFeedback: [\n+ {},\n+ ],\n+ stageInterviewFeedback: [\n+ {},\n+ ],\n+};\n+const session = {\n+ id: 2020,\n+ githubId: 'mikhama',\n+ isAdmin: true,\n+ isHirer: false,\n+ isActivist: false,\n+ roles: {\n+ 1: 'mentor',\n+ 2: 'student',\n+ 11: 'mentor',\n+ },\n+ coursesRoles: {\n+ 13: [\n+ 'manager',\n+ ],\n+ },\n+} as Session;\n+const router = {\n+ query: {\n+ githubId: 'petrov',\n+ },\n+ asPath: '/#edit/',\n+} as unknown as NextRouter;\n+const state = {\n+ profile,\n+ isInitialPermissionsSettingsChanged: false,\n+ isInitialProfileSettingsChanged: false,\n+};\n \n+describe('ProfilePage', () => {\n describe('Should render correctly', () => {\n- it('if full info about profile is in the state', () => {\n+ it('if full profile info is in the state', () => {\n const wrapper = shallow(\n <ProfilePage\n session={session}\n router={router}\n />,\n );\n- wrapper.setState({ profile });\n+ wrapper.setState(state);\n expect(shallowToJson(wrapper)).toMatchSnapshot();\n });\n });\n+\n+ const wrapper = shallow(\n+ <ProfilePage\n+ session={session}\n+ router={router}\n+ />,\n+ );\n+ const instance = wrapper.instance();\n+ describe('onPermissionsSettingsChange', () => {\n+ describe('Should set state correctly', () => {\n+ it('if permissions for student role were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: true,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isEmailVisible',\n+ role: 'student',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, 
changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isEmailVisible).toEqual({\n+ student: true, all: false,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ it('if permissions for mentor role were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: false,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isLinkedInVisible',\n+ role: 'mentor',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isLinkedInVisible).toEqual({\n+ mentor: false, student: false, all: false,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ it('if permissions for all roles were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: true,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isEducationVisible',\n+ role: 'all',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isEducationVisible).toEqual({\n+ mentor: true, student: true, all: true,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ });\n+ });\n+ describe('onProfileSettingsChange', () => {\n+ describe('Should set state correctly', () => {\n+ it('if \"profile.generalInfo.location\" was changed', async () => {\n+ const event = {\n+ id: 123,\n+ name: 'Minsk',\n+ }\n+ const path = 'generalInfo.location';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.locationId).toBe(123);\n+ expect(wrapper.state().profile.generalInfo.locationName).toBe('Minsk');\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ it('if \"profile.generalInfo.englishLevel\" was changed', async () => {\n+ const event = 'b2+';\n+ const path = 'generalInfo.englishLevel';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.englishLevel).toBe('b2+');\n+ });\n+ it('if field added to \"profile.generalInfo.educationHistory\"', async () => {\n+ const event = {\n+ type: 'add',\n+ };\n+ const path = 'generalInfo.educationHistory';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([\n+ {\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ },\n+ {\n+ graduationYear: null,\n+ faculty: null,\n+ university: null,\n+ },\n+ ]);\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ it('if field deleted from \"profile.generalInfo.educationHistory\"', async () => {\n+ const event = {\n+ type: 'delete',\n+ index: 0,\n+ };\n+ const path = 'generalInfo.educationHistory';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([]);\n+ });\n+ it('if some other field was changed', async () => {\n+ const event = {\n+ target: {\n+ value: 'Hello everyone, my name is Mike.',\n+ }\n+ };\n+ const path = 'generalInfo.aboutMyself';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.aboutMyself).toEqual('Hello everyone, my name is Mike.');\n+ 
expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ });\n+ });\n+ describe('changeProfilePageMode', () => {\n+ describe('Should set state correctly', () => {\n+ it('if mode = \"edit\" was passed', async () => {\n+ const mode = 'edit';\n+ wrapper.setState({ ...state, isEditingModeEnabled: false });\n+ expect(wrapper.state().isEditingModeEnabled).toBe(false);\n+ await instance.changeProfilePageMode(mode);\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\n+ });\n+ it('if mode = \"view\" was passed', async () => {\n+ const mode = 'view';\n+ wrapper.setState({ ...state, isEditingModeEnabled: true });\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\n+ await instance.changeProfilePageMode(mode);\n+ expect(wrapper.state().isEditingModeEnabled).toBe(false);\n+ });\n+ });\n+ });\n+ describe('saveProfile', () => {\n+ it('Should set state correctly', async () => {\n+ const profile = {\n+ generalInfo: {\n+ aboutMyself: 'Hello',\n+ educationHistory: [{\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ }],\n+ englishLevel: 'c1',\n+ locationId: 778,\n+ locationName: 'Hrodna',\n+ },\n+ contacts: {\n+ telegram: 'test',\n+ },\n+ permissionsSettings: {\n+ isProfileVisible: { all: true },\n+ isAboutVisible: { mentor: true, student: false, all: false },\n+ isEducationVisible: { mentor: true, student: false, all: false },\n+ isEnglishVisible: { student: true, all: true },\n+ isEmailVisible: { student: true, all: true },\n+ isTelegramVisible: { student: true, all: true },\n+ isSkypeVisible: { student: true, all: false },\n+ isPhoneVisible: { student: true, all: false },\n+ isContactsNotesVisible: { student: true, all: false },\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n+ isMentorStatsVisible: { mentor: true, student: true, all: false },\n+ isStudentStatsVisible: { student: false, all: false },\n+ },\n+ };\n+ wrapper.setState({\n+ ...state,\n+ profile,\n+ isInitialPermissionsSettingsChanged: true,\n+ isInitialProfileSettingsChanged: true,\n+ });\n+ await instance.saveProfile();\n+ expect(wrapper.state().isSaving).toBe(false);\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(false);\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(false);\n+ expect(wrapper.state().initialPermissionsSettings).toEqual(profile.permissionsSettings);\n+ expect(wrapper.state().initialProfileSettings).toEqual(profile);\n+ });\n+ });\n+ describe('hadStudentCoreJSInterview', () => {\n+ describe('Should return', () => {\n+ it('\"true\" if student has an \"interviewFormAnswers\" in one of the task', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {\n+ interviewFormAnswers: {},\n+ },\n+ {},\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\n+ expect(result).toBe(true);\n+ });\n+ it('\"false\" if student has not an \"interviewFormAnswers\" in one of the task', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {},\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\n+ expect(result).toBe(false);\n+ });\n+ });\n+ });\n+ describe('getStudentCoreJSInterviews', () => {\n+ it('Should return info about CoreJS interviews', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ 
courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {},\n+ {\n+ interviewer: {\n+ name: 'Dima Petrov',\n+ githubId: 'dip',\n+ },\n+ comment: 'Test',\n+ score: 9,\n+ interviewFormAnswers: {},\n+ },\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.getStudentCoreJSInterviews(studentStats);\n+ expect(result).toEqual([\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ interview: {\n+ answers: {},\n+ interviewer: {\n+ name: 'Dima Petrov',\n+ githubId: 'dip',\n+ },\n+ comment: 'Test',\n+ score: 9,\n+ },\n+ locationName: 'Minsk',\n+ },\n+ ]);\n+ });\n+ });\n });\ndiff --git a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\nindex fbd133c..729b2de 100644\n--- a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\n+++ b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\n@@ -1,6 +1,6 @@\n // Jest Snapshot v1, https://goo.gl/fbAQLP\n \n-exports[`ProfilePage Should render correctly if 1`] = `\n+exports[`ProfilePage Should render correctly if full profile info is in the state 1`] = `\n <Fragment>\n <LoadingScreen\n show={true}\n@@ -50,12 +50,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -135,12 +139,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -220,12 +228,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -305,12 +317,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -387,15 +403,7 @@ exports[`ProfilePage Should render correctly if 1`] = `\n }\n >\n <ContactsCard\n- data={\n- Object {\n- \"email\": \"[email 
protected]\",\n- \"notes\": \"discord: @petro, instagram: @petro12\",\n- \"phone\": \"+375292123456\",\n- \"skype\": \"petro:live\",\n- \"telegram\": \"petro\",\n- }\n- }\n+ data={Object {}}\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n onProfileSettingsChange={[Function]}\n@@ -461,84 +469,22 @@ exports[`ProfilePage Should render correctly if 1`] = `\n }\n />\n </div>\n- </Masonry>\n- <JSXStyle\n- id=\"3803498300\"\n- >\n- div.jsx-3803498300{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin-left:-16px;width:auto;}\n- </JSXStyle>\n- <JSXStyle\n- id=\"110195169\"\n- >\n- div.jsx-110195169{padding-left:16px;background-clip:padding-box;}\n- </JSXStyle>\n- </div>\n- </Spin>\n- </LoadingScreen>\n-</Fragment>\n-`;\n-\n-exports[`ProfilePage Should render correctly if full info about profile is in the state 1`] = `\n-<Fragment>\n- <LoadingScreen\n- show={true}\n- >\n- <Header\n- isProfileEditingModeEnabled={false}\n- isProfilePage={false}\n- isSaveButtonVisible={false}\n- onChangeProfilePageMode={[Function]}\n- onSaveClick={[Function]}\n- username=\"mikhama\"\n- />\n- <Spin\n- delay={200}\n- size=\"default\"\n- spinning={false}\n- wrapperClassName=\"\"\n- >\n- <div\n- style={\n- Object {\n- \"padding\": 10,\n- }\n- }\n- >\n- <Masonry\n- breakpointCols={\n- Object {\n- \"1100\": 3,\n- \"500\": 1,\n- \"700\": 2,\n- \"default\": 4,\n- }\n- }\n- className=\"jsx-3803498300\"\n- columnClassName=\"jsx-110195169\"\n- >\n <div\n- key=\"card-0\"\n+ key=\"card-5\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <MainCard\n+ <PublicFeedbackCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -602,28 +548,30 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-1\"\n+ key=\"card-6\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <AboutCard\n+ <StudentStatsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {\n+ \"courseFullName\": \"test\",\n+ \"courseName\": \"test\",\n+ \"locationName\": \"Minsk\",\n+ \"tasks\": Array [\n+ Object {\n+ \"interviewFormAnswers\": Object {},\n+ },\n+ ],\n+ },\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -687,28 +635,21 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-2\"\n+ key=\"card-7\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <EnglishCard\n+ <MentorStatsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n isEditingModeEnabled={false}\n 
onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -772,170 +713,44 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-3\"\n+ key=\"card-8\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <EducationCard\n+ <CoreJSIviewsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n- }\n- isEditingModeEnabled={false}\n- onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n- permissionsSettings={\n- Object {\n- \"isAboutVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isContactsNotesVisible\": Object {\n- \"all\": false,\n- \"student\": true,\n- },\n- \"isEducationVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isEmailVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isEnglishVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isLinkedInVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isMentorStatsVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isPhoneVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isProfileVisible\": Object {\n- \"all\": true,\n- },\n- \"isPublicFeedbackVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isSkypeVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isStudentStatsVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isTelegramVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- }\n+ Array [\n+ Object {\n+ \"courseFullName\": \"test\",\n+ \"courseName\": \"test\",\n+ \"interview\": Object {\n+ \"answers\": Object {},\n+ \"comment\": undefined,\n+ \"interviewer\": undefined,\n+ \"score\": undefined,\n+ },\n+ \"locationName\": \"Minsk\",\n+ },\n+ ]\n }\n />\n </div>\n <div\n- key=\"card-4\"\n+ key=\"card-9\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <ContactsCard\n+ <PreScreeningIviewsCard\n data={\n- Object {\n- \"email\": \"[email protected]\",\n- \"notes\": \"discord: @petro, instagram: @petro12\",\n- \"phone\": \"+375292123456\",\n- \"skype\": \"petro:live\",\n- \"telegram\": \"petro\",\n- }\n- }\n- isEditingModeEnabled={false}\n- onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n- permissionsSettings={\n- Object {\n- \"isAboutVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isContactsNotesVisible\": Object {\n- \"all\": false,\n- \"student\": true,\n- },\n- \"isEducationVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isEmailVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isEnglishVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isLinkedInVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isMentorStatsVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isPhoneVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isProfileVisible\": Object {\n- 
\"all\": true,\n- },\n- \"isPublicFeedbackVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isSkypeVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isStudentStatsVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isTelegramVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n />\n </div>\n", "diff --git a/src/notebook/components/transforms/html.js b/src/notebook/components/transforms/html.js\nindex 83fc1fb..021cc65 100644\n--- a/src/notebook/components/transforms/html.js\n+++ b/src/notebook/components/transforms/html.js\n@@ -8,16 +8,16 @@ type Props = {\n \n export default class HTMLDisplay extends React.Component {\n props: Props;\n+ el: HTMLElement;\n \n componentDidMount(): void {\n- if (this.refs.here) {\n- if (document.createRange && Range && Range.prototype.createContextualFragment) {\n- const range = document.createRange();\n- const fragment = range.createContextualFragment(this.props.data);\n- ReactDOM.findDOMNode(this.refs.here).appendChild(fragment);\n- } else {\n- ReactDOM.findDOMNode(this.refs.here).innerHTML = this.props.data;\n- }\n+ // Create a range to ensure that scripts are invoked from within the HTML\n+ if (document.createRange && Range && Range.prototype.createContextualFragment) {\n+ const range = document.createRange();\n+ const fragment = range.createContextualFragment(this.props.data);\n+ this.el.appendChild(fragment);\n+ } else {\n+ this.el.innerHTML = this.props.data;\n }\n }\n \n@@ -27,7 +27,7 @@ export default class HTMLDisplay extends React.Component {\n \n render(): ?React.Element<any> {\n return (\n- <div ref=\"here\" />\n+ <div ref={(el) => { this.el = el; }} />\n );\n }\n }\n", "diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml\nindex 3830f4c..3b14ee5 100644\n--- a/.github/workflows/deploy.yaml\n+++ b/.github/workflows/deploy.yaml\n@@ -67,7 +67,7 @@ jobs:\n run: aws s3 cp .next/static s3://cdn.rs.school/_next/static/ --recursive --cache-control \"public,max-age=15552000,immutable\"\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -117,7 +117,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -167,7 +167,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n"]
5
["22044d58302513f5cf22b06151c4a367bbb88f6e", "72bc0f5172c0a3d17bde29cfc00db4c60d2fee3a", "11ffd5174bd61a2939ae58d2b2d43284302ae490", "ee4bf61fb8836e249fb4ef3507dc938e70696b3f", "7785be09053049b30cf41b420c59f051cd0129fc"]
["fix", "docs", "test", "refactor", "ci"]
Fix Windows build,create DashboardDetails,remove broken link Fixes #1785,Fix Cube Store build on Windows,remove members that are left from ClusterTopology when last change is applied When the change is applied, the member is marked as LEFT. It is removed from the ClusterTopology when all changes in the ClusterChangePlan are completed.
["diff --git a/src/fs/mounts/mod.rs b/src/fs/mounts/mod.rs\nindex a7f8188..662e2f5 100644\n--- a/src/fs/mounts/mod.rs\n+++ b/src/fs/mounts/mod.rs\n@@ -29,11 +29,14 @@ impl std::error::Error for Error {}\n \n impl std::fmt::Display for Error {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n+ // Allow unreachable_patterns for windows build\n+ #[allow(unreachable_patterns)]\n match self {\n #[cfg(target_os = \"macos\")]\n Error::GetFSStatError(err) => write!(f, \"getfsstat failed: {err}\"),\n #[cfg(target_os = \"linux\")]\n- Error::IOError(err) => write!(f, \"failed to read /proc/mounts: {err}\")\n+ Error::IOError(err) => write!(f, \"failed to read /proc/mounts: {err}\"),\n+ _ => write!(f, \"Unknown error\"),\n }\n }\n }\n\\ No newline at end of file\ndiff --git a/src/main.rs b/src/main.rs\nindex 483e14d..ca28081 100644\n--- a/src/main.rs\n+++ b/src/main.rs\n@@ -62,6 +62,8 @@ mod theme;\n // to `clap` is complete.\n lazy_static! {\n static ref ALL_MOUNTS: HashMap<PathBuf, mounts::MountedFs> = {\n+ // Allow unused_mut for windows\n+ #[allow(unused_mut)]\n let mut mount_map: HashMap<PathBuf, mounts::MountedFs> = HashMap::new();\n \n #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n", "diff --git a/client/src/components/MentorSearch.tsx b/client/src/components/MentorSearch.tsx\nindex 622560a..06f0114 100644\n--- a/client/src/components/MentorSearch.tsx\n+++ b/client/src/components/MentorSearch.tsx\n@@ -7,8 +7,9 @@ type Props = UserProps & {\n };\n \n export function MentorSearch(props: Props) {\n- const courseService = useMemo(() => new CourseService(props.courseId), [props.courseId]);\n+ const { courseId, ...otherProps } = props;\n+ const courseService = useMemo(() => new CourseService(courseId), [courseId]);\n const handleSearch = useCallback(async (value: string) => courseService.searchMentors(value), [courseService]);\n \n- return <UserSearch {...props} searchFn={handleSearch} />;\n+ return <UserSearch {...otherProps} searchFn={handleSearch} />;\n }\ndiff --git a/client/src/components/Student/DashboardDetails.tsx b/client/src/components/Student/DashboardDetails.tsx\nnew file mode 100644\nindex 0000000..30506ef\n--- /dev/null\n+++ b/client/src/components/Student/DashboardDetails.tsx\n@@ -0,0 +1,89 @@\n+import { BranchesOutlined, CloseCircleTwoTone, SolutionOutlined, UndoOutlined } from '@ant-design/icons';\n+import { Button, Descriptions, Drawer } from 'antd';\n+import { CommentModal, MentorSearch } from 'components';\n+import { useState } from 'react';\n+import { StudentDetails } from 'services/course';\n+import { MentorBasic } from '../../../../common/models';\n+import css from 'styled-jsx/css';\n+\n+type Props = {\n+ details: StudentDetails | null;\n+ courseId: number;\n+ onClose: () => void;\n+ onCreateRepository: () => void;\n+ onRestoreStudent: () => void;\n+ onExpelStudent: (comment: string) => void;\n+ onIssueCertificate: () => void;\n+ onUpdateMentor: (githubId: string) => void;\n+};\n+\n+export function DashboardDetails(props: Props) {\n+ const [expelMode, setExpelMode] = useState(false);\n+ const { details } = props;\n+ if (details == null) {\n+ return null;\n+ }\n+ return (\n+ <>\n+ <Drawer\n+ width={600}\n+ title={`${details.name} , ${details.githubId}`}\n+ placement=\"right\"\n+ closable={false}\n+ onClose={props.onClose}\n+ visible={!!details}\n+ >\n+ <div className=\"student-details-actions\">\n+ <Button\n+ disabled={!details.isActive || !!details.repository}\n+ icon={<BranchesOutlined />}\n+ onClick={props.onCreateRepository}\n+ >\n+ Create 
Repository\n+ </Button>\n+ <Button disabled={!details.isActive} icon={<SolutionOutlined />} onClick={props.onIssueCertificate}>\n+ Issue Certificate\n+ </Button>\n+ <Button\n+ hidden={!details.isActive}\n+ icon={<CloseCircleTwoTone twoToneColor=\"red\" />}\n+ onClick={() => setExpelMode(true)}\n+ >\n+ Expel\n+ </Button>\n+ <Button hidden={details.isActive} icon={<UndoOutlined />} onClick={props.onRestoreStudent}>\n+ Restore\n+ </Button>\n+ <Descriptions bordered layout=\"vertical\" size=\"small\" column={1}>\n+ <Descriptions.Item label=\"Mentor\">\n+ <MentorSearch\n+ style={{ width: '100%' }}\n+ onChange={props.onUpdateMentor}\n+ courseId={props.courseId}\n+ keyField=\"githubId\"\n+ value={(details.mentor as MentorBasic)?.githubId}\n+ defaultValues={details.mentor ? [details.mentor as any] : []}\n+ />\n+ </Descriptions.Item>\n+ </Descriptions>\n+ </div>\n+ </Drawer>\n+ <CommentModal\n+ title=\"Expelling Reason\"\n+ visible={expelMode}\n+ onCancel={() => setExpelMode(false)}\n+ onOk={(text: string) => {\n+ props.onExpelStudent(text);\n+ setExpelMode(false);\n+ }}\n+ />\n+ <style jsx>{styles}</style>\n+ </>\n+ );\n+}\n+\n+const styles = css`\n+ .student-details-actions :global(.ant-btn) {\n+ margin: 0 8px 8px 0;\n+ }\n+`;\ndiff --git a/client/src/components/Student/index.ts b/client/src/components/Student/index.ts\nindex 71e28de..076f0e2 100644\n--- a/client/src/components/Student/index.ts\n+++ b/client/src/components/Student/index.ts\n@@ -1 +1,2 @@\n export { default as AssignStudentModal } from './AssignStudentModal';\n+export { DashboardDetails } from './DashboardDetails';\ndiff --git a/client/src/components/StudentSearch.tsx b/client/src/components/StudentSearch.tsx\nindex 5952aed..7c14263 100644\n--- a/client/src/components/StudentSearch.tsx\n+++ b/client/src/components/StudentSearch.tsx\n@@ -7,8 +7,9 @@ type Props = UserProps & {\n };\n \n export function StudentSearch(props: Props) {\n- const courseService = useMemo(() => new CourseService(props.courseId), [props.courseId]);\n+ const { courseId, ...otherProps } = props;\n+ const courseService = useMemo(() => new CourseService(courseId), [courseId]);\n const handleSearch = useCallback(async (value: string) => courseService.searchStudents(value), [courseService]);\n \n- return <UserSearch {...props} searchFn={handleSearch} />;\n+ return <UserSearch {...otherProps} searchFn={handleSearch} />;\n }\ndiff --git a/client/src/components/UserSearch.tsx b/client/src/components/UserSearch.tsx\nindex ff95941..4075827 100644\n--- a/client/src/components/UserSearch.tsx\n+++ b/client/src/components/UserSearch.tsx\n@@ -14,7 +14,7 @@ export type UserProps = SelectProps<string> & {\n \n export function UserSearch(props: UserProps) {\n const [data, setData] = useState<Person[]>([]);\n- const { searchFn = defaultSearch, defaultValues } = props;\n+ const { searchFn = defaultSearch, defaultValues, keyField, ...otherProps } = props;\n \n useEffect(() => {\n setData(defaultValues ?? 
[]);\n@@ -29,7 +29,6 @@ export function UserSearch(props: UserProps) {\n }\n };\n \n- const { keyField, ...otherProps } = props;\n return (\n <Select\n {...otherProps}\ndiff --git a/client/src/pages/course/admin/students.tsx b/client/src/pages/course/admin/students.tsx\nindex c15dee4..220ef54 100644\n--- a/client/src/pages/course/admin/students.tsx\n+++ b/client/src/pages/course/admin/students.tsx\n@@ -2,15 +2,13 @@ import {\n BranchesOutlined,\n CheckCircleTwoTone,\n ClockCircleTwoTone,\n- CloseCircleTwoTone,\n FileExcelOutlined,\n MinusCircleOutlined,\n- SolutionOutlined,\n- UndoOutlined,\n } from '@ant-design/icons';\n-import { Button, Drawer, message, Row, Statistic, Switch, Table, Typography, Descriptions } from 'antd';\n+import { Button, message, Row, Statistic, Switch, Table, Typography } from 'antd';\n import { ColumnProps } from 'antd/lib/table/Column';\n-import { CommentModal, PageLayout, withSession, MentorSearch } from 'components';\n+import { PageLayout, withSession } from 'components';\n+import { DashboardDetails } from 'components/Student';\n import {\n boolIconRenderer,\n boolSorter,\n@@ -21,14 +19,12 @@ import {\n } from 'components/Table';\n import { useLoading } from 'components/useLoading';\n import withCourseData from 'components/withCourseData';\n+import { isCourseManager } from 'domain/user';\n import _ from 'lodash';\n import { useMemo, useState } from 'react';\n import { useAsync } from 'react-use';\n-import { isCourseManager } from 'domain/user';\n import { CourseService, StudentDetails } from 'services/course';\n import { CoursePageProps } from 'services/models';\n-import css from 'styled-jsx/css';\n-import { MentorBasic } from '../../../../../common/models';\n \n const { Text } = Typography;\n \n@@ -39,7 +35,6 @@ function Page(props: Props) {\n const courseId = props.course.id;\n \n const [loading, withLoading] = useLoading(false);\n- const [expelMode, setExpelMode] = useState(false);\n const [isManager] = useState(isCourseManager(props.session, props.course.id));\n const courseService = useMemo(() => new CourseService(courseId), [courseId]);\n const [students, setStudents] = useState([] as StudentDetails[]);\n@@ -77,7 +72,6 @@ function Page(props: Props) {\n await courseService.expelStudent(githubId, text);\n message.info('Student has been expelled');\n }\n- setExpelMode(false);\n });\n \n const restoreStudent = withLoading(async () => {\n@@ -114,59 +108,20 @@ function Page(props: Props) {\n <div>{renderToolbar()}</div>\n </Row>\n <Table rowKey=\"id\" pagination={{ pageSize: 100 }} size=\"small\" dataSource={students} columns={getColumns()} />\n- <Drawer\n- width={400}\n- title={details ? 
`${details.name} , ${details.githubId}` : ''}\n- placement=\"right\"\n- closable={false}\n+\n+ <DashboardDetails\n+ onUpdateMentor={updateMentor}\n+ onRestoreStudent={restoreStudent}\n+ onIssueCertificate={issueCertificate}\n+ onExpelStudent={expelStudent}\n+ onCreateRepository={createRepository}\n onClose={() => {\n setDetails(null);\n loadStudents();\n }}\n- visible={!!details}\n- >\n- <div className=\"student-details-actions\">\n- <Button\n- disabled={!details?.isActive || !!details.repository}\n- icon={<BranchesOutlined />}\n- onClick={createRepository}\n- >\n- Create Repository\n- </Button>\n- <Button disabled={!details?.isActive} icon={<SolutionOutlined />} onClick={issueCertificate}>\n- Issue Certificate\n- </Button>\n- <Button\n- hidden={!details?.isActive}\n- icon={<CloseCircleTwoTone twoToneColor=\"red\" />}\n- onClick={() => setExpelMode(true)}\n- >\n- Expel\n- </Button>\n- <Button hidden={details?.isActive} icon={<UndoOutlined />} onClick={restoreStudent}>\n- Restore\n- </Button>\n- <Descriptions bordered layout=\"vertical\" size=\"small\" column={1}>\n- <Descriptions.Item label=\"Mentor\">\n- <MentorSearch\n- style={{ width: '100%' }}\n- onChange={updateMentor}\n- courseId={props.course.id}\n- keyField=\"githubId\"\n- value={(details?.mentor as MentorBasic)?.githubId}\n- defaultValues={details?.mentor ? [details?.mentor as any] : []}\n- />\n- </Descriptions.Item>\n- </Descriptions>\n- </div>\n- </Drawer>\n- <CommentModal\n- title=\"Expelling Reason\"\n- visible={expelMode}\n- onCancel={() => setExpelMode(false)}\n- onOk={expelStudent}\n+ details={details}\n+ courseId={props.course.id}\n />\n- <style jsx>{styles}</style>\n </PageLayout>\n );\n }\n@@ -306,14 +261,4 @@ function calculateStats(students: StudentDetails[]) {\n };\n }\n \n-const styles = css`\n- :global(.rs-table-row-disabled) {\n- opacity: 0.25;\n- }\n-\n- .student-details-actions :global(.ant-btn) {\n- margin: 0 8px 8px 0;\n- }\n-`;\n-\n export default withCourseData(withSession(Page));\ndiff --git a/client/src/styles/main.css b/client/src/styles/main.css\nindex 2ccac3c..df3cc8c 100644\n--- a/client/src/styles/main.css\n+++ b/client/src/styles/main.css\n@@ -21,6 +21,10 @@ body,\n display: none;\n }\n \n+.ant-drawer-content-wrapper {\n+ max-width: 85%;\n+}\n+\n .footer-dark.ant-layout-footer {\n background: #000;\n color: #fff;\n", "diff --git a/docs/content/Caching/Caching.md b/docs/content/Caching/Caching.md\nindex d873a52..9706dda 100644\n--- a/docs/content/Caching/Caching.md\n+++ b/docs/content/Caching/Caching.md\n@@ -135,8 +135,9 @@ If nothing is found in the cache, the query is executed in the database and the \n is returned as well as updating the cache.\n \n If an existing value is present in the cache and the `refreshKey` value for\n-the query hasn't changed, the cached value will be returned. Otherwise, a\n-[query renewal](#in-memory-cache-force-query-renewal) will be performed.\n+the query hasn't changed, the cached value will be returned. 
Otherwise, a SQL query will be executed either against the pre-aggregations storage or the source database to populate the cache with the results and return them.\n+\n+\n \n ### Refresh Keys\n \n", "diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml\nindex a986501..db33097 100644\n--- a/.github/workflows/publish.yml\n+++ b/.github/workflows/publish.yml\n@@ -409,7 +409,7 @@ jobs:\n run: vcpkg integrate install; vcpkg install openssl:x64-windows\n - name: Instal LLVM for Windows\n if: ${{ startsWith(matrix.os, 'windows') }}\n- run: choco install -y llvm --version 9.0.1\n+ run: choco install -y --force llvm --version 9.0.1\n - name: Set Env Variables for Windows\n uses: allenevans/[email protected]\n if: ${{ startsWith(matrix.os, 'windows') }}\ndiff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml\nindex d45cdf9..8d87ed6 100644\n--- a/.github/workflows/rust.yml\n+++ b/.github/workflows/rust.yml\n@@ -158,7 +158,7 @@ jobs:\n run: vcpkg integrate install; vcpkg install openssl:x64-windows\n - name: Instal LLVM for Windows\n if: ${{ startsWith(matrix.os, 'windows') }}\n- run: choco install -y llvm --version 9.0.1\n+ run: choco install -y --force llvm --version 9.0.1\n - name: Set Env Variables for Windows\n uses: allenevans/[email protected]\n if: ${{ startsWith(matrix.os, 'windows') }}\n", "diff --git a/topology/src/main/java/io/camunda/zeebe/topology/state/ClusterTopology.java b/topology/src/main/java/io/camunda/zeebe/topology/state/ClusterTopology.java\nindex e5a111d..8ccd410 100644\n--- a/topology/src/main/java/io/camunda/zeebe/topology/state/ClusterTopology.java\n+++ b/topology/src/main/java/io/camunda/zeebe/topology/state/ClusterTopology.java\n@@ -171,7 +171,31 @@ public record ClusterTopology(\n }\n \n private ClusterTopology advance() {\n- return new ClusterTopology(version, members, changes.advance());\n+ final ClusterTopology result = new ClusterTopology(version, members, changes.advance());\n+ if (!result.hasPendingChanges()) {\n+ // The last change has been applied. Clean up the members that are marked as LEFT in the\n+ // topology. This operation will be executed in the member that executes the last operation.\n+ // This is ok because it is guaranteed that no other concurrent modification will be applied\n+ // to the topology. This is because all the operations are applied sequentially, and no\n+ // topology update will be done without adding a ClusterChangePlan.\n+ return result.gc();\n+ }\n+ return result;\n+ }\n+\n+ private ClusterTopology gc() {\n+ if (hasPendingChanges()) {\n+ throw new IllegalStateException(\n+ \"Expected to remove members that are left from the topology, but there are pending changes \"\n+ + changes);\n+ }\n+ // remove members that are marked as LEFT\n+ final var currentMembers =\n+ members().entrySet().stream()\n+ .filter(entry -> entry.getValue().state() != State.LEFT)\n+ .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));\n+ // Increment the version so that other members can merge by overwriting their local topology.\n+ return new ClusterTopology(version + 1, currentMembers, changes);\n }\n \n public boolean hasMember(final MemberId memberId) {\n"]
5
["81ca000c6a7e7435809081c60be37dda23458ec8", "fd5f211916c989fddc2ee5afeeb7d46e6a2f51cb", "c351088bce98594c740a39546ce3655c91554a5d", "e34bb04baac7574e67bc566d13dea72092e0cfa3", "4bfbf60653068ef17df98c021134692bd6d02939"]
["build", "feat", "docs", "ci", "refactor"]
tests should pass now Make the code safer,fix "types" field in dist,add cleanup test Add another cleanup test, which verifies that the state is cleaned up after the timer (non-recurring) is triggered.,support document.html,remove unused
["diff --git a/goreleaserlib/goreleaser.go b/goreleaserlib/goreleaser.go\nindex 28ba3f4..67ba95d 100644\n--- a/goreleaserlib/goreleaser.go\n+++ b/goreleaserlib/goreleaser.go\n@@ -27,6 +27,15 @@ import (\n \tyaml \"gopkg.in/yaml.v2\"\n )\n \n+var (\n+\tnormalPadding = cli.Default.Padding\n+\tincreasedPadding = normalPadding * 2\n+)\n+\n+func init() {\n+\tlog.SetHandler(cli.Default)\n+}\n+\n var pipes = []pipeline.Piper{\n \tdefaults.Pipe{}, // load default configs\n \tgit.Pipe{}, // get and validate git repo state\n@@ -89,17 +98,15 @@ func Release(flags Flags) error {\n \t\tctx.Publish = false\n \t}\n \tctx.RmDist = flags.Bool(\"rm-dist\")\n-\tlogger, _ := log.Log.(*log.Logger)\n-\thandler, _ := logger.Handler.(*cli.Handler)\n \tfor _, pipe := range pipes {\n-\t\thandler.Padding = 3\n+\t\tcli.Default.Padding = normalPadding\n \t\tlog.Infof(\"\\033[1m%s\\033[0m\", strings.ToUpper(pipe.String()))\n-\t\thandler.Padding = 6\n+\t\tcli.Default.Padding = increasedPadding\n \t\tif err := handle(pipe.Run(ctx)); err != nil {\n \t\t\treturn err\n \t\t}\n \t}\n-\thandler.Padding = 3\n+\tcli.Default.Padding = normalPadding\n \treturn nil\n }\n \ndiff --git a/main.go b/main.go\nindex b9b961d..7ced1dd 100644\n--- a/main.go\n+++ b/main.go\n@@ -18,7 +18,7 @@ var (\n )\n \n func init() {\n-\tlog.SetHandler(lcli.New(os.Stdout))\n+\tlog.SetHandler(lcli.Default)\n }\n \n func main() {\n", "diff --git a/scripts/prepare.js b/scripts/prepare.js\nindex 4bab09b..55f459b 100644\n--- a/scripts/prepare.js\n+++ b/scripts/prepare.js\n@@ -96,7 +96,6 @@ async function prepare() {\n delete json.private\n delete json.scripts\n delete json.devDependencies\n- delete json.types\n \n // Add \"postinstall\" script for donations.\n if (/(native|core)$/.test(name))\n@@ -128,6 +127,7 @@ async function prepare() {\n else {\n json.main = json.main.replace(/^dist\\//, '')\n if (json.main.endsWith('.cjs.js')) {\n+ json.types = json.main.replace('.cjs.js', '.d.ts')\n json.module = json.main.replace('.cjs', '')\n }\n }\n", "diff --git a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\nindex d36b4c9..ca5047f 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n@@ -630,6 +630,40 @@ public final class ProcessExecutionCleanStateTest {\n }\n \n @Test\n+ public void testProcessWithTriggerTimerStartEvent() {\n+ // given\n+ final var deployment =\n+ engineRule\n+ .deployment()\n+ .withXmlResource(\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .timerWithDate(\"=now() + duration(\\\"PT15S\\\")\")\n+ .endEvent()\n+ .done())\n+ .deploy();\n+\n+ final var processDefinitionKey =\n+ deployment.getValue().getProcessesMetadata().get(0).getProcessDefinitionKey();\n+\n+ // when\n+ engineRule.awaitProcessingOf(\n+ RecordingExporter.timerRecords(TimerIntent.CREATED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .getFirst());\n+\n+ engineRule.increaseTime(Duration.ofSeconds(15));\n+\n+ RecordingExporter.processInstanceRecords(ProcessInstanceIntent.ELEMENT_COMPLETED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .withElementType(BpmnElementType.PROCESS)\n+ .await();\n+\n+ // then\n+ assertThatStateIsEmpty();\n+ }\n+\n+ @Test\n public void testProcessWithTimerStartEventRedeployment() {\n // given\n final var deployment =\n", "diff --git 
a/packages/nuxt3/src/builder/builder.ts b/packages/nuxt3/src/builder/builder.ts\nindex a24bd88..ecc22ef 100644\n--- a/packages/nuxt3/src/builder/builder.ts\n+++ b/packages/nuxt3/src/builder/builder.ts\n@@ -3,6 +3,7 @@ import fsExtra from 'fs-extra'\n import { debounce } from 'lodash'\n import { BundleBuilder } from 'src/webpack'\n import { Nuxt } from '../core'\n+import { DeterminedGlobals, determineGlobals } from '../utils'\n import {\n templateData,\n compileTemplates,\n@@ -15,12 +16,14 @@ import Ignore from './ignore'\n \n export class Builder {\n nuxt: Nuxt\n+ globals: DeterminedGlobals\n ignore: Ignore\n- app: NuxtApp\n templates: NuxtTemplate[]\n+ app: NuxtApp\n \n constructor (nuxt) {\n this.nuxt = nuxt\n+ this.globals = determineGlobals(nuxt.options.globalName, nuxt.options.globals)\n this.ignore = new Ignore({\n rootDir: nuxt.options.srcDir,\n ignoreArray: nuxt.options.ignore.concat(\n@@ -32,6 +35,10 @@ export class Builder {\n build () {\n return build(this)\n }\n+\n+ close () {\n+ // TODO: close watchers\n+ }\n }\n \n // Extends VueRouter\ndiff --git a/packages/nuxt3/src/builder/template.ts b/packages/nuxt3/src/builder/template.ts\nindex 63a9115..fe09f16 100644\n--- a/packages/nuxt3/src/builder/template.ts\n+++ b/packages/nuxt3/src/builder/template.ts\n@@ -11,6 +11,7 @@ export interface NuxtTemplate {\n \n export function templateData (builder) {\n return {\n+ globals: builder.globals,\n app: builder.app\n }\n }\ndiff --git a/packages/nuxt3/src/builder/watch.ts b/packages/nuxt3/src/builder/watch.ts\nindex b4d1415..d148fec 100644\n--- a/packages/nuxt3/src/builder/watch.ts\n+++ b/packages/nuxt3/src/builder/watch.ts\n@@ -38,7 +38,8 @@ export function createWatcher (\n return {\n watchAll,\n watch,\n- debug\n+ debug,\n+ close: () => watcher.close()\n }\n }\n \ndiff --git a/packages/nuxt3/src/config/options.ts b/packages/nuxt3/src/config/options.ts\nindex 5aac8ac..6e7f93c 100644\n--- a/packages/nuxt3/src/config/options.ts\n+++ b/packages/nuxt3/src/config/options.ts\n@@ -12,7 +12,7 @@ import { DefaultConfiguration, defaultNuxtConfigFile, getDefaultNuxtConfig } fro\n import { deleteProp, mergeConfigs, setProp, overrideProp, Optional } from './transformers'\n \n interface InputConfiguration {\n- appTemplatePath?: string\n+ documentPath?: string\n layoutTransition?: string | DefaultConfiguration['layoutTransition']\n loading?: true | false | DefaultConfiguration['loading']\n manifest?: {\n@@ -197,13 +197,16 @@ function normalizeConfig (_options: CliConfiguration) {\n .concat(options.extensions))\n \n // If app.html is defined, set the template path to the user template\n- if (options.appTemplatePath === undefined) {\n- options.appTemplatePath = path.resolve(options.buildDir, 'views/app.template.html')\n- if (fs.existsSync(path.join(options.srcDir, 'app.html'))) {\n- options.appTemplatePath = path.join(options.srcDir, 'app.html')\n+ if (options.documentPath === undefined) {\n+ options.documentPath = path.resolve(options.buildDir, 'views/document.template.html')\n+ const userDocumentPath = path.join(options.srcDir, 'document.html')\n+ if (fs.existsSync(userDocumentPath)) {\n+ options.documentPath = userDocumentPath\n+ } else {\n+ options.watch.push(userDocumentPath)\n }\n } else {\n- options.appTemplatePath = path.resolve(options.srcDir, options.appTemplatePath)\n+ options.documentPath = path.resolve(options.srcDir, options.documentPath)\n }\n \n overrideProp(options.build, 'publicPath', options.build.publicPath.replace(/([^/])$/, '$1/'))\ndiff --git 
a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\nindex 3e3ce2d..482bd6b 100644\n--- a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n+++ b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n@@ -96,6 +96,9 @@ export default class SSRRenderer extends BaseRenderer {\n // Call Vue renderer renderToString\n let APP = await this.vueRenderer.renderToString(renderContext)\n \n+ // Wrap with Nuxt id\n+ APP = `<div id=\"${this.serverContext.globals.id}\">${APP}</div>`\n+\n // Call render:done in app\n await renderContext.nuxt.hooks.callHook('vue-renderer:done')\n \ndiff --git a/packages/nuxt3/src/webpack/configs/client.ts b/packages/nuxt3/src/webpack/configs/client.ts\nindex a257948..4fb35e0 100644\n--- a/packages/nuxt3/src/webpack/configs/client.ts\n+++ b/packages/nuxt3/src/webpack/configs/client.ts\n@@ -94,7 +94,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.ssr.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: false // Resources will be injected using bundleRenderer\n })\n@@ -104,7 +104,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.spa.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: true\n })\n", "diff --git a/src/content/redux/modules/dictionaries.ts b/src/content/redux/modules/dictionaries.ts\nindex 88f7215..570d397 100644\n--- a/src/content/redux/modules/dictionaries.ts\n+++ b/src/content/redux/modules/dictionaries.ts\n@@ -3,7 +3,6 @@ import { DictID, appConfigFactory, AppConfig } from '@/app-config'\n import isEqual from 'lodash/isEqual'\n import { saveWord } from '@/_helpers/record-manager'\n import { getDefaultSelectionInfo, SelectionInfo, isSameSelection } from '@/_helpers/selection'\n-import { createActiveConfigStream } from '@/_helpers/config-manager'\n import { isContainChinese, isContainEnglish, testerPunct, isContainMinor, testerChinese, testJapanese, testKorean } from '@/_helpers/lang-check'\n import { MsgType, MsgFetchDictResult } from '@/typings/message'\n import { StoreState, DispatcherThunk, Dispatcher } from './index'\ndiff --git a/src/content/redux/modules/widget.ts b/src/content/redux/modules/widget.ts\nindex 53ad550..68e0a3d 100644\n--- a/src/content/redux/modules/widget.ts\n+++ b/src/content/redux/modules/widget.ts\n@@ -1,9 +1,9 @@\n import * as recordManager from '@/_helpers/record-manager'\n import { StoreState, DispatcherThunk, Dispatcher } from './index'\n-import appConfigFactory, { TCDirection, AppConfig, DictID } from '@/app-config'\n+import appConfigFactory, { TCDirection, DictID } from '@/app-config'\n import { message, storage } from '@/_helpers/browser-api'\n-import { createActiveConfigStream, createConfigIDListStream } from '@/_helpers/config-manager'\n-import { MsgSelection, MsgType, MsgTempDisabledState, MsgEditWord, MsgOpenUrl, MsgFetchDictResult } from '@/typings/message'\n+import { createConfigIDListStream } from '@/_helpers/config-manager'\n+import { MsgType, MsgTempDisabledState, MsgEditWord, MsgOpenUrl, MsgFetchDictResult } from '@/typings/message'\n import { searchText, restoreDicts } from '@/content/redux/modules/dictionaries'\n import { SelectionInfo, getDefaultSelectionInfo } from '@/_helpers/selection'\n import { Mutable } from '@/typings/helpers'\n"]
5
["5636313d7c9cfbd9f48578fd104771d65eae9720", "f14ef3809f456aadd73523e47cb16c5d15e9a9df", "aa746b764e6c54bbbd631210fce35df842d09b12", "09476134eeeb12c025618919ab9a795a680a9b30", "a50b51999015e210918d9c8e95fd4cac347353be"]
["fix", "build", "test", "feat", "refactor"]
add instruction for finding version,add ability to get all encoded values,add tests for ProfilePage methods,missing transformation for T,add canonical `_name` to edge packages
["diff --git a/.github/ISSUE_TEMPLATE/_bug_report_chs.md b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\nindex 42a2e0f..44a33db 100644\n--- a/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n+++ b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n@@ -36,7 +36,7 @@ assignees: ''\n ## \u8bbe\u5907\u4fe1\u606f\n - \u64cd\u4f5c\u7cfb\u7edf: [] <!-- \u5982 [Window10] -->\n - \u6d4f\u89c8\u5668\u7248\u672c: [] <!-- \u5982 [Chrome77] -->\n-- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] -->\n+- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] \uff08\u5728\u6269\u5c55\u7ba1\u7406\u9875\u9762\u67e5\u770b\uff09 -->\n \n <!-- \u8bf7\u5728\u4e0b\u65b9 ## \u5f00\u5934\u884c\u4e4b\u95f4\u7684\u7a7a\u767d\u5904\u586b\u5199 -->\n \n", "diff --git a/delorean_mem_qe/src/column.rs b/delorean_mem_qe/src/column.rs\nindex bc89cb2..b3df18e 100644\n--- a/delorean_mem_qe/src/column.rs\n+++ b/delorean_mem_qe/src/column.rs\n@@ -537,6 +537,22 @@ impl Column {\n }\n }\n \n+ /// Materialise all of the encoded values.\n+ pub fn all_encoded_values(&self) -> Vector {\n+ match self {\n+ Column::String(c) => {\n+ let now = std::time::Instant::now();\n+ let v = c.all_encoded_values();\n+ log::debug!(\"time getting all encoded values {:?}\", now.elapsed());\n+\n+ log::debug!(\"dictionary {:?}\", c.data.dictionary());\n+ Vector::Integer(v)\n+ }\n+ Column::Float(c) => Vector::Float(c.all_encoded_values()),\n+ Column::Integer(c) => Vector::Integer(c.all_encoded_values()),\n+ }\n+ }\n+\n /// Given an encoded value for a row, materialise and return the decoded\n /// version.\n ///\n@@ -986,6 +1002,10 @@ impl String {\n self.data.encoded_values(row_ids)\n }\n \n+ pub fn all_encoded_values(&self) -> Vec<i64> {\n+ self.data.all_encoded_values()\n+ }\n+\n /// Return the decoded value for an encoded ID.\n ///\n /// Panics if there is no decoded value for the provided id\n@@ -1037,6 +1057,10 @@ impl Float {\n self.data.encoded_values(row_ids)\n }\n \n+ pub fn all_encoded_values(&self) -> Vec<f64> {\n+ self.data.all_encoded_values()\n+ }\n+\n pub fn scan_from(&self, row_id: usize) -> &[f64] {\n self.data.scan_from(row_id)\n }\n@@ -1106,6 +1130,10 @@ impl Integer {\n self.data.encoded_values(row_ids)\n }\n \n+ pub fn all_encoded_values(&self) -> Vec<i64> {\n+ self.data.all_encoded_values()\n+ }\n+\n pub fn scan_from(&self, row_id: usize) -> &[i64] {\n self.data.scan_from(row_id)\n }\ndiff --git a/delorean_mem_qe/src/encoding.rs b/delorean_mem_qe/src/encoding.rs\nindex d6a865a..4b057cf 100644\n--- a/delorean_mem_qe/src/encoding.rs\n+++ b/delorean_mem_qe/src/encoding.rs\n@@ -68,6 +68,12 @@ where\n self.values(row_ids)\n }\n \n+ /// Return all encoded values. 
For this encoding this is just the decoded\n+ /// values\n+ pub fn all_encoded_values(&self) -> Vec<T> {\n+ self.values.clone()\n+ }\n+\n // TODO(edd): fix this when added NULL support\n pub fn scan_from_until_some(&self, _row_id: usize) -> Option<T> {\n unreachable!(\"to remove\");\n@@ -485,6 +491,26 @@ impl DictionaryRLE {\n out\n }\n \n+ // values materialises a vector of references to all logical values in the\n+ // encoding.\n+ pub fn all_values(&mut self) -> Vec<Option<&String>> {\n+ let mut out: Vec<Option<&String>> = Vec::with_capacity(self.total as usize);\n+\n+ // build reverse mapping.\n+ let mut idx_value = BTreeMap::new();\n+ for (k, v) in &self.entry_index {\n+ idx_value.insert(v, k);\n+ }\n+ assert_eq!(idx_value.len(), self.entry_index.len());\n+\n+ for (idx, rl) in &self.run_lengths {\n+ // TODO(edd): fix unwrap - we know that the value exists in map...\n+ let v = idx_value.get(&idx).unwrap().as_ref();\n+ out.extend(iter::repeat(v).take(*rl as usize));\n+ }\n+ out\n+ }\n+\n /// Return the decoded value for an encoded ID.\n ///\n /// Panics if there is no decoded value for the provided id\n@@ -528,22 +554,13 @@ impl DictionaryRLE {\n out\n }\n \n- // values materialises a vector of references to all logical values in the\n- // encoding.\n- pub fn all_values(&mut self) -> Vec<Option<&String>> {\n- let mut out: Vec<Option<&String>> = Vec::with_capacity(self.total as usize);\n-\n- // build reverse mapping.\n- let mut idx_value = BTreeMap::new();\n- for (k, v) in &self.entry_index {\n- idx_value.insert(v, k);\n- }\n- assert_eq!(idx_value.len(), self.entry_index.len());\n+ // all_encoded_values materialises a vector of all encoded values for the\n+ // column.\n+ pub fn all_encoded_values(&self) -> Vec<i64> {\n+ let mut out: Vec<i64> = Vec::with_capacity(self.total as usize);\n \n for (idx, rl) in &self.run_lengths {\n- // TODO(edd): fix unwrap - we know that the value exists in map...\n- let v = idx_value.get(&idx).unwrap().as_ref();\n- out.extend(iter::repeat(v).take(*rl as usize));\n+ out.extend(iter::repeat(*idx as i64).take(*rl as usize));\n }\n out\n }\ndiff --git a/delorean_mem_qe/src/segment.rs b/delorean_mem_qe/src/segment.rs\nindex c058df0..f8c5005 100644\n--- a/delorean_mem_qe/src/segment.rs\n+++ b/delorean_mem_qe/src/segment.rs\n@@ -228,7 +228,7 @@ impl Segment {\n group_columns: &[String],\n aggregates: &[(String, AggregateType)],\n window: i64,\n- ) -> BTreeMap<Vec<String>, Vec<(String, Option<column::Aggregate>)>> {\n+ ) -> BTreeMap<Vec<i64>, Vec<(&String, &AggregateType, Option<column::Aggregate>)>> {\n // Build a hash table - essentially, scan columns for matching row ids,\n // emitting the encoded value for each column and track those value\n // combinations in a hashmap with running aggregates.\n@@ -242,6 +242,10 @@ impl Segment {\n assert_ne!(group_columns[group_columns.len() - 1], \"time\");\n }\n \n+ // TODO(edd): Perf - if there is no predicate and we want entire segment\n+ // then it will be a lot faster to not build filtered_row_ids and just\n+ // get all encoded values for each grouping column...\n+\n // filter on predicates and time\n let filtered_row_ids: croaring::Bitmap;\n if let Some(row_ids) = self.filter_by_predicates_eq(time_range, predicates) {\n@@ -263,7 +267,12 @@ impl Segment {\n let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());\n for group_column in group_columns {\n if let Some(column) = self.column(&group_column) {\n- let encoded_values = column.encoded_values(&filtered_row_ids_vec);\n+ let encoded_values = if 
filtered_row_ids_vec.len() == self.meta.rows {\n+ column.all_encoded_values()\n+ } else {\n+ column.encoded_values(&filtered_row_ids_vec)\n+ };\n+\n assert_eq!(\n filtered_row_ids.cardinality() as usize,\n encoded_values.len()\n@@ -325,10 +334,10 @@ impl Segment {\n .collect::<Vec<_>>();\n \n // hashMap is about 20% faster than BTreeMap in this case\n- let mut hash_table: HashMap<\n+ let mut hash_table: BTreeMap<\n Vec<i64>,\n Vec<(&String, &AggregateType, Option<column::Aggregate>)>,\n- > = HashMap::new();\n+ > = BTreeMap::new();\n \n let mut aggregate_row: Vec<(&str, Option<column::Scalar>)> =\n std::iter::repeat_with(|| (\"\", None))\n@@ -406,8 +415,10 @@ impl Segment {\n }\n processed_rows += 1;\n }\n+ // println!(\"groups: {:?}\", hash_table.len());\n log::debug!(\"({:?} rows processed) {:?}\", processed_rows, hash_table);\n BTreeMap::new()\n+ // hash_table\n }\n \n pub fn aggregate_by_group_using_sort(\n@@ -451,7 +462,11 @@ impl Segment {\n let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());\n for group_column in group_columns {\n if let Some(column) = self.column(&group_column) {\n- let encoded_values = column.encoded_values(&filtered_row_ids_vec);\n+ let encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {\n+ column.all_encoded_values()\n+ } else {\n+ column.encoded_values(&filtered_row_ids_vec)\n+ };\n assert_eq!(\n filtered_row_ids.cardinality() as usize,\n encoded_values.len()\n@@ -557,6 +572,10 @@ impl Segment {\n assert_ne!(group_columns[group_columns.len() - 1], \"time\");\n }\n \n+ // TODO(edd): Perf - if there is no predicate and we want entire segment\n+ // then it will be a lot faster to not build filtered_row_ids and just\n+ // get all encoded values for each grouping column...\n+\n // filter on predicates and time\n let filtered_row_ids: croaring::Bitmap;\n if let Some(row_ids) = self.filter_by_predicates_eq(time_range, predicates) {\n@@ -577,7 +596,11 @@ impl Segment {\n let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());\n for group_column in group_columns {\n if let Some(column) = self.column(&group_column) {\n- let encoded_values = column.encoded_values(&filtered_row_ids_vec);\n+ let encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {\n+ column.all_encoded_values()\n+ } else {\n+ column.encoded_values(&filtered_row_ids_vec)\n+ };\n assert_eq!(\n filtered_row_ids.cardinality() as usize,\n encoded_values.len()\n@@ -709,6 +732,7 @@ impl Segment {\n aggregates: group_key_aggregates,\n });\n \n+ // println!(\"groups: {:?}\", results.len());\n log::debug!(\"({:?} rows processed) {:?}\", processed_rows, results);\n // results\n vec![]\n", "diff --git a/client/src/components/Profile/PreScreeningIviewCard.tsx b/client/src/components/Profile/PreScreeningIviewCard.tsx\nindex f84392a..2031203 100644\n--- a/client/src/components/Profile/PreScreeningIviewCard.tsx\n+++ b/client/src/components/Profile/PreScreeningIviewCard.tsx\n@@ -27,7 +27,7 @@ type State = {\n isPreScreeningIviewModalVisible: boolean;\n };\n \n-class CoreJSIviewsCard extends React.PureComponent<Props, State> {\n+class PreScreeningIviewsCard extends React.PureComponent<Props, State> {\n state = {\n courseIndex: 0,\n isPreScreeningIviewModalVisible: false,\n@@ -98,4 +98,4 @@ class CoreJSIviewsCard extends React.PureComponent<Props, State> {\n }\n }\n \n-export default CoreJSIviewsCard;\n+export default PreScreeningIviewsCard;\ndiff --git a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap 
b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\nindex 7b73c3f..54b378c 100644\n--- a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\n+++ b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\n@@ -1,7 +1,7 @@\n // Jest Snapshot v1, https://goo.gl/fbAQLP\n \n exports[`PreScreeningIviewCard Should render correctly 1`] = `\n-<CoreJSIviewsCard\n+<PreScreeningIviewsCard\n data={\n Array [\n Object {\n@@ -3015,5 +3015,5 @@ exports[`PreScreeningIviewCard Should render correctly 1`] = `\n </div>\n </Card>\n </CommonCard>\n-</CoreJSIviewsCard>\n+</PreScreeningIviewsCard>\n `;\ndiff --git a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\nindex 079d966..95f3e49 100644\n--- a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\n+++ b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\n@@ -4,7 +4,6 @@ import { shallowToJson } from 'enzyme-to-json';\n import { NextRouter } from 'next/router';\n import { Session } from 'components/withSession';\n import { ProfilePage } from '../index';\n-// import { GeneralInfo } from '../../../../../common/models/profile';\n \n jest.mock('next/config', () => () => ({}));\n jest.mock('services/user', () => ({\n@@ -12,80 +11,378 @@ jest.mock('services/user', () => ({\n getProfileInfo() {\n return jest.fn();\n }\n+ saveProfileInfo() {\n+ return jest.fn();\n+ }\n },\n }),\n );\n \n-describe('ProfilePage', () => {\n- const profile = {\n- generalInfo: {\n- name: 'Dzmitry Petrov',\n- githubId: 'petrov',\n- aboutMyself: 'Test',\n+const profile = {\n+ permissionsSettings: {\n+ isProfileVisible: { all: true },\n+ isAboutVisible: { mentor: true, student: false, all: false },\n+ isEducationVisible: { mentor: true, student: false, all: false },\n+ isEnglishVisible: { student: false, all: false },\n+ isEmailVisible: { student: false, all: false },\n+ isTelegramVisible: { student: false, all: false },\n+ isSkypeVisible: { student: false, all: false },\n+ isPhoneVisible: { student: false, all: false },\n+ isContactsNotesVisible: { student: true, all: false },\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n+ isMentorStatsVisible: { mentor: true, student: true, all: false },\n+ isStudentStatsVisible: { student: false, all: false },\n+ },\n+ generalInfo: {\n+ aboutMyself: 'Test',\n+ educationHistory: [{\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ }],\n+ englishLevel: 'a2+',\n+ locationId: 456,\n+ locationName: 'Brest',\n+ },\n+ contacts: {},\n+ mentorStats: [\n+ {},\n+ ],\n+ studentStats: [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n locationName: 'Minsk',\n- locationId: '1',\n- educationHistory: null,\n- englishLevel: 'a2+',\n- },\n- permissionsSettings: {\n- isProfileVisible: { all: true },\n- isAboutVisible: { mentor: true, student: false, all: false },\n- isEducationVisible: { mentor: true, student: false, all: false },\n- isEnglishVisible: { student: false, all: false },\n- isEmailVisible: { student: false, all: false },\n- isTelegramVisible: { student: false, all: false },\n- isSkypeVisible: { student: false, all: false },\n- isPhoneVisible: { student: false, all: false },\n- isContactsNotesVisible: { student: true, all: false },\n- isLinkedInVisible: { mentor: true, student: false, all: false },\n- isPublicFeedbackVisible: { mentor: true, student: 
true, all: false },\n- isMentorStatsVisible: { mentor: true, student: true, all: false },\n- isStudentStatsVisible: { student: false, all: false },\n- },\n- contacts: {\n- phone: '+375292123456',\n- email: '[email protected]',\n- skype: 'petro:live',\n- telegram: 'petro',\n- notes: 'discord: @petro, instagram: @petro12',\n- },\n- isPermissionsSettingsChanged: true,\n- isProfileSettingsChanged: true,\n- };\n- const session = {\n- id: 2020,\n- githubId: 'mikhama',\n- isAdmin: true,\n- isHirer: false,\n- isActivist: false,\n- roles: {\n- 1: 'mentor',\n- 2: 'student',\n- 11: 'mentor',\n- },\n- coursesRoles: {\n- 13: [\n- 'manager',\n+ tasks: [\n+ {\n+ interviewFormAnswers: {},\n+ },\n ],\n },\n- } as Session;\n- const router = {\n- query: {\n- githubId: 'petrov',\n- },\n- asPath: '/#edit/',\n- } as unknown as NextRouter;\n+ ],\n+ publicFeedback: [\n+ {},\n+ ],\n+ stageInterviewFeedback: [\n+ {},\n+ ],\n+};\n+const session = {\n+ id: 2020,\n+ githubId: 'mikhama',\n+ isAdmin: true,\n+ isHirer: false,\n+ isActivist: false,\n+ roles: {\n+ 1: 'mentor',\n+ 2: 'student',\n+ 11: 'mentor',\n+ },\n+ coursesRoles: {\n+ 13: [\n+ 'manager',\n+ ],\n+ },\n+} as Session;\n+const router = {\n+ query: {\n+ githubId: 'petrov',\n+ },\n+ asPath: '/#edit/',\n+} as unknown as NextRouter;\n+const state = {\n+ profile,\n+ isInitialPermissionsSettingsChanged: false,\n+ isInitialProfileSettingsChanged: false,\n+};\n \n+describe('ProfilePage', () => {\n describe('Should render correctly', () => {\n- it('if full info about profile is in the state', () => {\n+ it('if full profile info is in the state', () => {\n const wrapper = shallow(\n <ProfilePage\n session={session}\n router={router}\n />,\n );\n- wrapper.setState({ profile });\n+ wrapper.setState(state);\n expect(shallowToJson(wrapper)).toMatchSnapshot();\n });\n });\n+\n+ const wrapper = shallow(\n+ <ProfilePage\n+ session={session}\n+ router={router}\n+ />,\n+ );\n+ const instance = wrapper.instance();\n+ describe('onPermissionsSettingsChange', () => {\n+ describe('Should set state correctly', () => {\n+ it('if permissions for student role were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: true,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isEmailVisible',\n+ role: 'student',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isEmailVisible).toEqual({\n+ student: true, all: false,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ it('if permissions for mentor role were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: false,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isLinkedInVisible',\n+ role: 'mentor',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isLinkedInVisible).toEqual({\n+ mentor: false, student: false, all: false,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ it('if permissions for all roles were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: true,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isEducationVisible',\n+ role: 'all',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ 
expect(wrapper.state().profile.permissionsSettings.isEducationVisible).toEqual({\n+ mentor: true, student: true, all: true,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ });\n+ });\n+ describe('onProfileSettingsChange', () => {\n+ describe('Should set state correctly', () => {\n+ it('if \"profile.generalInfo.location\" was changed', async () => {\n+ const event = {\n+ id: 123,\n+ name: 'Minsk',\n+ }\n+ const path = 'generalInfo.location';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.locationId).toBe(123);\n+ expect(wrapper.state().profile.generalInfo.locationName).toBe('Minsk');\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ it('if \"profile.generalInfo.englishLevel\" was changed', async () => {\n+ const event = 'b2+';\n+ const path = 'generalInfo.englishLevel';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.englishLevel).toBe('b2+');\n+ });\n+ it('if field added to \"profile.generalInfo.educationHistory\"', async () => {\n+ const event = {\n+ type: 'add',\n+ };\n+ const path = 'generalInfo.educationHistory';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([\n+ {\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ },\n+ {\n+ graduationYear: null,\n+ faculty: null,\n+ university: null,\n+ },\n+ ]);\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ it('if field deleted from \"profile.generalInfo.educationHistory\"', async () => {\n+ const event = {\n+ type: 'delete',\n+ index: 0,\n+ };\n+ const path = 'generalInfo.educationHistory';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([]);\n+ });\n+ it('if some other field was changed', async () => {\n+ const event = {\n+ target: {\n+ value: 'Hello everyone, my name is Mike.',\n+ }\n+ };\n+ const path = 'generalInfo.aboutMyself';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.aboutMyself).toEqual('Hello everyone, my name is Mike.');\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ });\n+ });\n+ describe('changeProfilePageMode', () => {\n+ describe('Should set state correctly', () => {\n+ it('if mode = \"edit\" was passed', async () => {\n+ const mode = 'edit';\n+ wrapper.setState({ ...state, isEditingModeEnabled: false });\n+ expect(wrapper.state().isEditingModeEnabled).toBe(false);\n+ await instance.changeProfilePageMode(mode);\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\n+ });\n+ it('if mode = \"view\" was passed', async () => {\n+ const mode = 'view';\n+ wrapper.setState({ ...state, isEditingModeEnabled: true });\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\n+ await instance.changeProfilePageMode(mode);\n+ expect(wrapper.state().isEditingModeEnabled).toBe(false);\n+ });\n+ });\n+ });\n+ describe('saveProfile', () => {\n+ it('Should set state correctly', async () => {\n+ const profile = {\n+ generalInfo: {\n+ aboutMyself: 'Hello',\n+ educationHistory: [{\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ }],\n+ englishLevel: 'c1',\n+ locationId: 778,\n+ locationName: 
'Hrodna',\n+ },\n+ contacts: {\n+ telegram: 'test',\n+ },\n+ permissionsSettings: {\n+ isProfileVisible: { all: true },\n+ isAboutVisible: { mentor: true, student: false, all: false },\n+ isEducationVisible: { mentor: true, student: false, all: false },\n+ isEnglishVisible: { student: true, all: true },\n+ isEmailVisible: { student: true, all: true },\n+ isTelegramVisible: { student: true, all: true },\n+ isSkypeVisible: { student: true, all: false },\n+ isPhoneVisible: { student: true, all: false },\n+ isContactsNotesVisible: { student: true, all: false },\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n+ isMentorStatsVisible: { mentor: true, student: true, all: false },\n+ isStudentStatsVisible: { student: false, all: false },\n+ },\n+ };\n+ wrapper.setState({\n+ ...state,\n+ profile,\n+ isInitialPermissionsSettingsChanged: true,\n+ isInitialProfileSettingsChanged: true,\n+ });\n+ await instance.saveProfile();\n+ expect(wrapper.state().isSaving).toBe(false);\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(false);\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(false);\n+ expect(wrapper.state().initialPermissionsSettings).toEqual(profile.permissionsSettings);\n+ expect(wrapper.state().initialProfileSettings).toEqual(profile);\n+ });\n+ });\n+ describe('hadStudentCoreJSInterview', () => {\n+ describe('Should return', () => {\n+ it('\"true\" if student has an \"interviewFormAnswers\" in one of the task', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {\n+ interviewFormAnswers: {},\n+ },\n+ {},\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\n+ expect(result).toBe(true);\n+ });\n+ it('\"false\" if student has not an \"interviewFormAnswers\" in one of the task', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {},\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\n+ expect(result).toBe(false);\n+ });\n+ });\n+ });\n+ describe('getStudentCoreJSInterviews', () => {\n+ it('Should return info about CoreJS interviews', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {},\n+ {\n+ interviewer: {\n+ name: 'Dima Petrov',\n+ githubId: 'dip',\n+ },\n+ comment: 'Test',\n+ score: 9,\n+ interviewFormAnswers: {},\n+ },\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.getStudentCoreJSInterviews(studentStats);\n+ expect(result).toEqual([\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ interview: {\n+ answers: {},\n+ interviewer: {\n+ name: 'Dima Petrov',\n+ githubId: 'dip',\n+ },\n+ comment: 'Test',\n+ score: 9,\n+ },\n+ locationName: 'Minsk',\n+ },\n+ ]);\n+ });\n+ });\n });\ndiff --git a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\nindex fbd133c..729b2de 100644\n--- a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\n+++ b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\n@@ -1,6 +1,6 @@\n // Jest Snapshot v1, https://goo.gl/fbAQLP\n \n-exports[`ProfilePage Should render correctly if 1`] = `\n+exports[`ProfilePage Should render correctly if full profile info 
is in the state 1`] = `\n <Fragment>\n <LoadingScreen\n show={true}\n@@ -50,12 +50,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -135,12 +139,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -220,12 +228,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -305,12 +317,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -387,15 +403,7 @@ exports[`ProfilePage Should render correctly if 1`] = `\n }\n >\n <ContactsCard\n- data={\n- Object {\n- \"email\": \"[email protected]\",\n- \"notes\": \"discord: @petro, instagram: @petro12\",\n- \"phone\": \"+375292123456\",\n- \"skype\": \"petro:live\",\n- \"telegram\": \"petro\",\n- }\n- }\n+ data={Object {}}\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n onProfileSettingsChange={[Function]}\n@@ -461,84 +469,22 @@ exports[`ProfilePage Should render correctly if 1`] = `\n }\n />\n </div>\n- </Masonry>\n- <JSXStyle\n- id=\"3803498300\"\n- >\n- div.jsx-3803498300{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin-left:-16px;width:auto;}\n- </JSXStyle>\n- <JSXStyle\n- id=\"110195169\"\n- >\n- div.jsx-110195169{padding-left:16px;background-clip:padding-box;}\n- </JSXStyle>\n- </div>\n- </Spin>\n- </LoadingScreen>\n-</Fragment>\n-`;\n-\n-exports[`ProfilePage Should render correctly if full info about profile is in the state 1`] = `\n-<Fragment>\n- <LoadingScreen\n- show={true}\n- >\n- <Header\n- isProfileEditingModeEnabled={false}\n- isProfilePage={false}\n- isSaveButtonVisible={false}\n- onChangeProfilePageMode={[Function]}\n- onSaveClick={[Function]}\n- username=\"mikhama\"\n- />\n- <Spin\n- 
delay={200}\n- size=\"default\"\n- spinning={false}\n- wrapperClassName=\"\"\n- >\n- <div\n- style={\n- Object {\n- \"padding\": 10,\n- }\n- }\n- >\n- <Masonry\n- breakpointCols={\n- Object {\n- \"1100\": 3,\n- \"500\": 1,\n- \"700\": 2,\n- \"default\": 4,\n- }\n- }\n- className=\"jsx-3803498300\"\n- columnClassName=\"jsx-110195169\"\n- >\n <div\n- key=\"card-0\"\n+ key=\"card-5\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <MainCard\n+ <PublicFeedbackCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -602,28 +548,30 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-1\"\n+ key=\"card-6\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <AboutCard\n+ <StudentStatsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {\n+ \"courseFullName\": \"test\",\n+ \"courseName\": \"test\",\n+ \"locationName\": \"Minsk\",\n+ \"tasks\": Array [\n+ Object {\n+ \"interviewFormAnswers\": Object {},\n+ },\n+ ],\n+ },\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -687,28 +635,21 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-2\"\n+ key=\"card-7\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <EnglishCard\n+ <MentorStatsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -772,170 +713,44 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-3\"\n+ key=\"card-8\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <EducationCard\n+ <CoreJSIviewsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n- }\n- isEditingModeEnabled={false}\n- onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n- permissionsSettings={\n- Object {\n- \"isAboutVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isContactsNotesVisible\": Object {\n- \"all\": false,\n- \"student\": true,\n- },\n- \"isEducationVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isEmailVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- 
\"isEnglishVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isLinkedInVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isMentorStatsVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isPhoneVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isProfileVisible\": Object {\n- \"all\": true,\n- },\n- \"isPublicFeedbackVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isSkypeVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isStudentStatsVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isTelegramVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- }\n+ Array [\n+ Object {\n+ \"courseFullName\": \"test\",\n+ \"courseName\": \"test\",\n+ \"interview\": Object {\n+ \"answers\": Object {},\n+ \"comment\": undefined,\n+ \"interviewer\": undefined,\n+ \"score\": undefined,\n+ },\n+ \"locationName\": \"Minsk\",\n+ },\n+ ]\n }\n />\n </div>\n <div\n- key=\"card-4\"\n+ key=\"card-9\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <ContactsCard\n+ <PreScreeningIviewsCard\n data={\n- Object {\n- \"email\": \"[email protected]\",\n- \"notes\": \"discord: @petro, instagram: @petro12\",\n- \"phone\": \"+375292123456\",\n- \"skype\": \"petro:live\",\n- \"telegram\": \"petro\",\n- }\n- }\n- isEditingModeEnabled={false}\n- onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n- permissionsSettings={\n- Object {\n- \"isAboutVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isContactsNotesVisible\": Object {\n- \"all\": false,\n- \"student\": true,\n- },\n- \"isEducationVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isEmailVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isEnglishVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isLinkedInVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isMentorStatsVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isPhoneVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isProfileVisible\": Object {\n- \"all\": true,\n- },\n- \"isPublicFeedbackVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isSkypeVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isStudentStatsVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isTelegramVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n />\n </div>\n", "diff --git a/src/Tuple/Merge.ts b/src/Tuple/Merge.ts\nindex dfa7ce5..5ba44b7 100644\n--- a/src/Tuple/Merge.ts\n+++ b/src/Tuple/Merge.ts\n@@ -30,7 +30,7 @@ type _MergeFlat<O extends object, O1P extends object> = {\n }\n \n type MergeDeep<T extends any[], T1 extends any[]> =\n- TupleOf<Compute<_MergeDeep<T, Omit<ObjectOf<T1>, keyof T>, ObjectOf<T1>>>>\n+ TupleOf<Compute<_MergeDeep<ObjectOf<T>, Omit<ObjectOf<T1>, keyof T>, ObjectOf<T1>>>>\n // same principle as above, but with a little tweak\n // we keep the original `O1` to know if we can merge\n // => if `O` and `O1` have `object` fields of same name\n", "diff --git a/scripts/bump-edge.ts b/scripts/bump-edge.ts\nindex e92e3c9..0b7a11a 100644\n--- a/scripts/bump-edge.ts\n+++ 
b/scripts/bump-edge.ts\n@@ -53,6 +53,7 @@ async function loadWorkspace (dir: string) {\n }\n \n const rename = (from: string, to: string) => {\n+ find(from).data._name = find(from).data.name\n find(from).data.name = to\n for (const pkg of packages) {\n pkg.updateDeps((dep) => {\n"]
5
["af0a5f7ab9d71fe20aa0888f682368f32b26fe18", "cad5e45208346528ad02cd04dcac863f90faa037", "11ffd5174bd61a2939ae58d2b2d43284302ae490", "c4d9e5023fa0f88ba283b37da27677ceda1cbfbb", "573f87edf9bdc19c9c4c3a978fad6ed3ce788f5f"]
["docs", "feat", "test", "fix", "build"]
change tests to depend on BrokerContext,unset DOCKER_HOST set to swarm by jenkins - fixes issue where old images are pushed to registry,add prewatch script to core,verify process responses for deploy command. Tests should generally only fail for 1 reason, but the first test case (`shouldDeployResourceFromFile`) verifies multiple unrelated things. To align with the other test cases in this class, it makes sense that this test case only verifies that the gateway service was called with a specific request. We can extract the verification of the response into a separate test. This can also be applied to the shouldDeployMultipleResources test case.,rename ELECTRON_CACHE env variable to electron_config_cache (#21313)
["diff --git a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\nindex fe4e42d..37c7066 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n@@ -7,20 +7,14 @@\n */\n package io.camunda.zeebe.broker;\n \n-import io.atomix.cluster.AtomixCluster;\n import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupContextImpl;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupProcess;\n-import io.camunda.zeebe.broker.clustering.ClusterServices;\n import io.camunda.zeebe.broker.exporter.repo.ExporterLoadException;\n import io.camunda.zeebe.broker.exporter.repo.ExporterRepository;\n-import io.camunda.zeebe.broker.partitioning.PartitionManager;\n-import io.camunda.zeebe.broker.system.EmbeddedGatewayService;\n import io.camunda.zeebe.broker.system.SystemContext;\n import io.camunda.zeebe.broker.system.configuration.BrokerCfg;\n-import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.broker.system.monitoring.BrokerHealthCheckService;\n-import io.camunda.zeebe.broker.system.monitoring.DiskSpaceUsageMonitor;\n import io.camunda.zeebe.protocol.impl.encoding.BrokerInfo;\n import io.camunda.zeebe.util.LogUtil;\n import io.camunda.zeebe.util.VersionUtil;\n@@ -184,35 +178,15 @@ public final class Broker implements AutoCloseable {\n }\n \n // only used for tests\n- public EmbeddedGatewayService getEmbeddedGatewayService() {\n- return brokerContext.getEmbeddedGatewayService();\n- }\n-\n- public AtomixCluster getAtomixCluster() {\n- return brokerContext.getAtomixCluster();\n- }\n-\n- public ClusterServices getClusterServices() {\n- return brokerContext.getClusterServices();\n- }\n-\n- public DiskSpaceUsageMonitor getDiskSpaceUsageMonitor() {\n- return brokerContext.getDiskSpaceUsageMonitor();\n- }\n-\n- public BrokerAdminService getBrokerAdminService() {\n- return brokerContext.getBrokerAdminService();\n+ public BrokerContext getBrokerContext() {\n+ return brokerContext;\n }\n \n+ // only used for tests\n public SystemContext getSystemContext() {\n return systemContext;\n }\n \n- public PartitionManager getPartitionManager() {\n- return brokerContext.getPartitionManager();\n- }\n- // only used for tests\n-\n /**\n * Temporary helper object. This object is needed during the transition of broker startup/shutdown\n * steps to the new concept. 
Afterwards, the expectation is that this object will merge with the\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\nindex bda5170..1accbc1 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n@@ -45,11 +45,12 @@ public class BrokerSnapshotTest {\n (RaftPartition)\n brokerRule\n .getBroker()\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(PartitionId.from(PartitionManagerImpl.GROUP_NAME, PARTITION_ID));\n journalReader = raftPartition.getServer().openReader();\n- brokerAdminService = brokerRule.getBroker().getBrokerAdminService();\n+ brokerAdminService = brokerRule.getBroker().getBrokerContext().getBrokerAdminService();\n \n final String contactPoint = NetUtil.toSocketAddressString(brokerRule.getGatewayAddress());\n final ZeebeClientBuilder zeebeClientBuilder =\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\nindex e98e7d2..a831bfe 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n@@ -173,11 +173,11 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n }\n \n public ClusterServices getClusterServices() {\n- return broker.getClusterServices();\n+ return broker.getBrokerContext().getClusterServices();\n }\n \n public AtomixCluster getAtomixCluster() {\n- return broker.getAtomixCluster();\n+ return broker.getBrokerContext().getAtomixCluster();\n }\n \n public InetSocketAddress getGatewayAddress() {\n@@ -245,7 +245,8 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient brokerClient = embeddedGatewayService.get().getBrokerClient();\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\nindex 890b596..8561cf1 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n@@ -29,6 +29,7 @@ import io.atomix.utils.net.Address;\n import io.camunda.zeebe.broker.Broker;\n import io.camunda.zeebe.broker.PartitionListener;\n import io.camunda.zeebe.broker.SpringBrokerBridge;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.exporter.stream.ExporterDirectorContext;\n import io.camunda.zeebe.broker.partitioning.PartitionManagerImpl;\n import io.camunda.zeebe.broker.system.SystemContext;\n@@ -602,11 +603,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void stepDown(final Broker broker, final int partitionId) {\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = 
atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == partitionId)\n .map(RaftPartition.class::cast)\n@@ -617,14 +618,14 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void disconnect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).stop().join();\n ((NettyMessagingService) atomix.getMessagingService()).stop().join();\n }\n \n public void connect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).start().join();\n ((NettyMessagingService) atomix.getMessagingService()).start().join();\n@@ -666,11 +667,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n final var broker = brokers.get(expectedLeader);\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == START_PARTITION_ID)\n .map(RaftPartition.class::cast)\n@@ -775,14 +776,15 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void takeSnapshot(final Broker broker) {\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n }\n \n public void triggerAndWaitForSnapshots() {\n // Ensure that the exporter positions are distributed to the followers\n getClock().addTime(ExporterDirectorContext.DEFAULT_DISTRIBUTION_INTERVAL);\n getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::takeSnapshot);\n \n getBrokers()\n@@ -794,7 +796,7 @@ public final class ClusteringRule extends ExternalResource {\n .until(\n () -> {\n // Trigger snapshot again in case snapshot is not already taken\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n return getSnapshot(broker);\n },\n Optional::isPresent));\n@@ -831,7 +833,7 @@ public final class ClusteringRule extends ExternalResource {\n \n private Optional<SnapshotId> getSnapshot(final Broker broker, final int partitionId) {\n \n- final var partitions = broker.getBrokerAdminService().getPartitionStatus();\n+ final var partitions = broker.getBrokerContext().getBrokerAdminService().getPartitionStatus();\n final var partitionStatus = partitions.get(partitionId);\n \n return Optional.ofNullable(partitionStatus)\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\nindex f07961c..d46636b 
100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n@@ -88,6 +88,7 @@ public class DiskSpaceMonitoringFailOverTest {\n () ->\n clusteringRule\n .getBroker(newLeaderId)\n+ .getBrokerContext()\n .getBrokerAdminService()\n .getPartitionStatus()\n .get(1)\n@@ -96,7 +97,7 @@ public class DiskSpaceMonitoringFailOverTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\nindex 0a02a27..6e93cf9 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n@@ -165,7 +165,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -188,7 +188,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\nindex 9cef5a0..a487729 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n@@ -192,7 +192,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -215,7 +216,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n 
new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\nindex 2d1e4f0..58f6f16 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n@@ -48,6 +48,7 @@ public class HealthMonitoringTest {\n final var raftPartition =\n (RaftPartition)\n leader\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\nindex 468f83c..7ff03be 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n@@ -11,6 +11,7 @@ import static org.assertj.core.api.Assertions.assertThat;\n \n import io.atomix.raft.RaftServer.Role;\n import io.camunda.zeebe.broker.Broker;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor.Phase;\n import io.camunda.zeebe.it.clustering.ClusteringRule;\n@@ -48,7 +49,7 @@ public class BrokerAdminServiceClusterTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -61,7 +62,8 @@ public class BrokerAdminServiceClusterTest {\n // when\n final var followerStatus =\n followers.stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .map(BrokerAdminService::getPartitionStatus)\n .map(status -> status.get(1));\n \n@@ -94,7 +96,8 @@ public class BrokerAdminServiceClusterTest {\n \n // then\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(this::assertThatStatusContainsProcessedPositionInSnapshot);\n }\n \n@@ -102,7 +105,8 @@ public class BrokerAdminServiceClusterTest {\n public void shouldPauseAfterLeaderChange() {\n // given\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::pauseStreamProcessing);\n \n // when\n@@ -113,6 +117,7 @@ public class BrokerAdminServiceClusterTest {\n final var newLeaderAdminService =\n clusteringRule\n .getBroker(clusteringRule.getLeaderForPartition(1).getNodeId())\n+ .getBrokerContext()\n .getBrokerAdminService();\n assertStreamProcessorPhase(newLeaderAdminService, Phase.PAUSED);\n }\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\nindex 5160b50..2185329 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n+++ 
b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n@@ -41,7 +41,7 @@ public class BrokerAdminServiceTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -144,7 +144,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PAUSED);\n }\n \n@@ -161,7 +161,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PROCESSING);\n }\n \n@@ -176,7 +176,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.PAUSED);\n }\n \n@@ -193,7 +193,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.EXPORTING);\n }\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\nindex d6c8ab3..4582ad2 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n@@ -30,7 +30,7 @@ public class BrokerAdminServiceWithOutExporterTest {\n // given\n final var leader =\n clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- final var leaderAdminService = leader.getBrokerAdminService();\n+ final var leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n // when there are no exporters configured\n // then\n final var partitionStatus = leaderAdminService.getPartitionStatus().get(1);\ndiff --git a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\nindex 36bc0bf..d332201 100644\n--- a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n+++ b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n@@ -240,7 +240,8 @@ public class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient 
brokerClient = embeddedGatewayService.get().getBrokerClient();\n \n", "diff --git a/.ci/docker.dsl b/.ci/docker.dsl\nindex 4768cb8..9f6a4c9 100644\n--- a/.ci/docker.dsl\n+++ b/.ci/docker.dsl\n@@ -8,6 +8,9 @@ def dockerHubUpload =\n '''\\\n #!/bin/bash -xeu\n \n+# clear docker host env set by jenkins job\n+unset DOCKER_HOST\n+\n VERSION=${RELEASE_VERSION}\n \n if [ \"${RELEASE_VERSION}\" = \"SNAPSHOT\" ]; then\n@@ -26,9 +29,6 @@ docker login --username ${DOCKER_HUB_USERNAME} --password ${DOCKER_HUB_PASSWORD}\n docker push camunda/zeebe:${RELEASE_VERSION}\n \n if [ \"${IS_LATEST}\" = \"true\" ]; then\n- # to make sure we can tag latest, there were problems before\n- docker rmi camunda/zeebe:latest\n-\n docker tag -f camunda/zeebe:${RELEASE_VERSION} camunda/zeebe:latest\n docker push camunda/zeebe:latest\n fi\n", "diff --git a/.gitignore b/.gitignore\nindex 3445558..72257cb 100644\n--- a/.gitignore\n+++ b/.gitignore\n@@ -1,4 +1,5 @@\n *.o\n+.env\n settheory\n constraint\n Main\ndiff --git a/packages/core/package.json b/packages/core/package.json\nindex 9ba8b93..79bd532 100644\n--- a/packages/core/package.json\n+++ b/packages/core/package.json\n@@ -14,6 +14,7 @@\n \"build:parsers\": \"nearleyc src/parser/Domain.ne > src/parser/DomainParser.ts && nearleyc src/parser/Substance.ne > src/parser/SubstanceParser.ts && nearleyc src/parser/Style.ne > src/parser/StyleParser.ts\",\n \"prebuild\": \"yarn build:parsers\",\n \"prestart\": \"yarn build:parsers\",\n+ \"prewatch\": \"yarn build:parsers\",\n \"test\": \"jest --watchAll=false\",\n \"test:watch\": \"jest --watchAll\",\n \"build\": \"rollup -c\",\n", "diff --git a/clients/java/src/test/java/io/camunda/zeebe/client/process/DeployResourceTest.java b/clients/java/src/test/java/io/camunda/zeebe/client/process/DeployResourceTest.java\nindex 1d96c74..b65d9f3 100644\n--- a/clients/java/src/test/java/io/camunda/zeebe/client/process/DeployResourceTest.java\n+++ b/clients/java/src/test/java/io/camunda/zeebe/client/process/DeployResourceTest.java\n@@ -22,7 +22,6 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy;\n \n import io.camunda.zeebe.client.api.command.ClientException;\n import io.camunda.zeebe.client.api.response.DeploymentEvent;\n-import io.camunda.zeebe.client.api.response.Process;\n import io.camunda.zeebe.client.impl.command.StreamUtil;\n import io.camunda.zeebe.client.impl.response.ProcessImpl;\n import io.camunda.zeebe.client.util.ClientTest;\n@@ -35,7 +34,6 @@ import java.io.IOException;\n import java.io.InputStream;\n import java.nio.charset.StandardCharsets;\n import java.time.Duration;\n-import java.util.List;\n import org.junit.Test;\n \n public final class DeployResourceTest extends ClientTest {\n@@ -49,25 +47,15 @@ public final class DeployResourceTest extends ClientTest {\n @Test\n public void shouldDeployResourceFromFile() {\n // given\n- final long key = 123L;\n- final String filename = DeployResourceTest.class.getResource(BPMN_1_FILENAME).getPath();\n- gatewayService.onDeployResourceRequest(\n- key, deployedResource(deployedProcess(BPMN_1_PROCESS_ID, 12, 423, filename)));\n- final Process expected = new ProcessImpl(423, BPMN_1_PROCESS_ID, 12, filename);\n+ final String path = DeployResourceTest.class.getResource(BPMN_1_FILENAME).getPath();\n \n // when\n- final DeploymentEvent response =\n- client.newDeployCommand().addResourceFile(filename).send().join();\n+ client.newDeployCommand().addResourceFile(path).send().join();\n \n // then\n- assertThat(response.getKey()).isEqualTo(key);\n-\n- final List<Process> 
processes = response.getProcesses();\n- assertThat(processes).containsOnly(expected);\n-\n final DeployResourceRequest request = gatewayService.getLastRequest();\n final Resource resource = request.getResources(0);\n- assertThat(resource.getName()).isEqualTo(filename);\n+ assertThat(resource.getName()).isEqualTo(path);\n assertThat(resource.getContent().toByteArray()).isEqualTo(getBytes(BPMN_1_FILENAME));\n }\n \n@@ -114,7 +102,6 @@ public final class DeployResourceTest extends ClientTest {\n // then\n final DeployResourceRequest request = gatewayService.getLastRequest();\n final Resource resource = request.getResources(0);\n-\n assertThat(resource.getName()).isEqualTo(filename);\n assertThat(resource.getContent().toByteArray()).isEqualTo(getBytes(BPMN_1_FILENAME));\n }\n@@ -135,7 +122,6 @@ public final class DeployResourceTest extends ClientTest {\n // then\n final DeployResourceRequest request = gatewayService.getLastRequest();\n final Resource resource = request.getResources(0);\n-\n assertThat(resource.getName()).isEqualTo(filename);\n assertThat(resource.getContent().toByteArray()).isEqualTo(getBytes(BPMN_1_FILENAME));\n }\n@@ -152,7 +138,6 @@ public final class DeployResourceTest extends ClientTest {\n // then\n final DeployResourceRequest request = gatewayService.getLastRequest();\n final Resource resource = request.getResources(0);\n-\n assertThat(resource.getName()).isEqualTo(filename);\n assertThat(resource.getContent().toByteArray()).isEqualTo(getBytes(BPMN_1_FILENAME));\n }\n@@ -174,7 +159,6 @@ public final class DeployResourceTest extends ClientTest {\n // then\n final DeployResourceRequest request = gatewayService.getLastRequest();\n final Resource resource = request.getResources(0);\n-\n assertThat(resource.getName()).isEqualTo(filename);\n assertThat(resource.getContent().toByteArray()).isEqualTo(expectedBytes);\n }\n@@ -183,13 +167,58 @@ public final class DeployResourceTest extends ClientTest {\n public void shouldDeployMultipleResources() {\n // given\n final long key = 345L;\n-\n final String filename1 = BPMN_1_FILENAME.substring(1);\n final String filename2 = BPMN_2_FILENAME.substring(1);\n+ gatewayService.onDeployResourceRequest(\n+ key,\n+ deployedResource(deployedProcess(BPMN_1_PROCESS_ID, 1, 1, filename1)),\n+ deployedResource(deployedProcess(BPMN_2_PROCESS_ID, 1, 2, filename2)));\n \n- final Process expected1 = new ProcessImpl(1, BPMN_1_PROCESS_ID, 1, filename1);\n- final Process expected2 = new ProcessImpl(2, BPMN_2_PROCESS_ID, 1, filename2);\n+ // when\n+ client\n+ .newDeployCommand()\n+ .addResourceFromClasspath(filename1)\n+ .addResourceFromClasspath(filename2)\n+ .send()\n+ .join();\n \n+ // then\n+ final DeployResourceRequest request = gatewayService.getLastRequest();\n+ assertThat(request.getResourcesList()).hasSize(2);\n+\n+ final Resource resource1 = request.getResources(0);\n+ assertThat(resource1.getName()).isEqualTo(filename1);\n+ assertThat(resource1.getContent().toByteArray()).isEqualTo(getBytes(BPMN_1_FILENAME));\n+\n+ final Resource resource2 = request.getResources(1);\n+ assertThat(resource2.getName()).isEqualTo(filename2);\n+ assertThat(resource2.getContent().toByteArray()).isEqualTo(getBytes(BPMN_2_FILENAME));\n+ }\n+\n+ @Test\n+ public void shouldDeployProcessAsResource() {\n+ // given\n+ final long key = 123L;\n+ final String filename = DeployResourceTest.class.getResource(BPMN_1_FILENAME).getPath();\n+ gatewayService.onDeployResourceRequest(\n+ key, deployedResource(deployedProcess(BPMN_1_PROCESS_ID, 12, 423, filename)));\n+\n+ // when\n+ 
final DeploymentEvent response =\n+ client.newDeployCommand().addResourceFile(filename).send().join();\n+\n+ // then\n+ assertThat(response.getKey()).isEqualTo(key);\n+ assertThat(response.getProcesses())\n+ .containsExactly(new ProcessImpl(423, BPMN_1_PROCESS_ID, 12, filename));\n+ }\n+\n+ @Test\n+ public void shouldDeployMultipleProcessesAsResources() {\n+ // given\n+ final long key = 345L;\n+ final String filename1 = BPMN_1_FILENAME.substring(1);\n+ final String filename2 = BPMN_2_FILENAME.substring(1);\n gatewayService.onDeployResourceRequest(\n key,\n deployedResource(deployedProcess(BPMN_1_PROCESS_ID, 1, 1, filename1)),\n@@ -206,21 +235,10 @@ public final class DeployResourceTest extends ClientTest {\n \n // then\n assertThat(response.getKey()).isEqualTo(key);\n-\n- final List<Process> processes = response.getProcesses();\n- assertThat(processes).containsOnly(expected1, expected2);\n-\n- final DeployResourceRequest request = gatewayService.getLastRequest();\n- assertThat(request.getResourcesList()).hasSize(2);\n-\n- Resource resource = request.getResources(0);\n-\n- assertThat(resource.getName()).isEqualTo(filename1);\n- assertThat(resource.getContent().toByteArray()).isEqualTo(getBytes(BPMN_1_FILENAME));\n-\n- resource = request.getResources(1);\n- assertThat(resource.getName()).isEqualTo(filename2);\n- assertThat(resource.getContent().toByteArray()).isEqualTo(getBytes(BPMN_2_FILENAME));\n+ assertThat(response.getProcesses())\n+ .containsExactly(\n+ new ProcessImpl(1, BPMN_1_PROCESS_ID, 1, filename1),\n+ new ProcessImpl(2, BPMN_2_PROCESS_ID, 1, filename2));\n }\n \n @Test\n", "diff --git a/docs/tutorial/installation.md b/docs/tutorial/installation.md\nindex d4af120..1a09eea 100644\n--- a/docs/tutorial/installation.md\n+++ b/docs/tutorial/installation.md\n@@ -82,7 +82,7 @@ with the network at all.\n On environments that have been using older versions of Electron, you might find the\n cache also in `~/.electron`.\n \n-You can also override the local cache location by providing a `ELECTRON_CACHE`\n+You can also override the local cache location by providing a `electron_config_cache`\n environment variable.\n \n The cache contains the version's official zip file as well as a checksum, stored as\n"]
5
["e52a6201093f273add4903dd5f4e55a63539386d", "8b18a58969ed2adf2df2a8bfe91aedacad3868f5", "aa0152baa4376b1087c86499a7c289b668d5ad55", "390eadc270d027493722cdbe9c8f4140d027e473", "f2f52c23b513dd857350f3c163f676d37189d0d3"]
["refactor", "ci", "build", "test", "docs"]
rename step,update version (v0.6.18),simplify statement,add --ignore-existing to all npx commands,set cursor position in setHorizontalRule correctly, fix #2429
["diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\nindex e81d897..5c3ee6b 100644\n--- a/.github/workflows/ci.yml\n+++ b/.github/workflows/ci.yml\n@@ -45,7 +45,7 @@ jobs:\n - name: Install dependencies\n run: pnpm install\n \n- - name: Typecheck\n+ - name: Build (stub)\n run: pnpm build:stub\n \n - name: Typecheck\n", "diff --git a/Cargo.lock b/Cargo.lock\nindex c32d8b4..599790e 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -94,7 +94,7 @@ dependencies = [\n \n [[package]]\n name = \"els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n dependencies = [\n \"erg_common\",\n \"erg_compiler\",\n@@ -105,7 +105,7 @@ dependencies = [\n \n [[package]]\n name = \"erg\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"els\",\n \"erg_common\",\n@@ -115,7 +115,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_common\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"backtrace-on-stack-overflow\",\n \"crossterm\",\n@@ -125,7 +125,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_compiler\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"erg_parser\",\n@@ -133,7 +133,7 @@ dependencies = [\n \n [[package]]\n name = \"erg_parser\"\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n dependencies = [\n \"erg_common\",\n \"unicode-xid\",\ndiff --git a/Cargo.toml b/Cargo.toml\nindex baaa0ac..5082cd3 100644\n--- a/Cargo.toml\n+++ b/Cargo.toml\n@@ -20,7 +20,7 @@ members = [\n ]\n \n [workspace.package]\n-version = \"0.6.18-nightly.2\"\n+version = \"0.6.18\"\n authors = [\"erg-lang team <[email protected]>\"]\n license = \"MIT OR Apache-2.0\"\n edition = \"2021\"\n@@ -64,10 +64,10 @@ full = [\"els\", \"full-repl\", \"unicode\", \"pretty\"]\n experimental = [\"erg_common/experimental\", \"erg_parser/experimental\", \"erg_compiler/experimental\"]\n \n [workspace.dependencies]\n-erg_common = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_common\" }\n-erg_parser = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_parser\" }\n-erg_compiler = { version = \"0.6.18-nightly.2\", path = \"./crates/erg_compiler\" }\n-els = { version = \"0.1.30-nightly.2\", path = \"./crates/els\" }\n+erg_common = { version = \"0.6.18\", path = \"./crates/erg_common\" }\n+erg_parser = { version = \"0.6.18\", path = \"./crates/erg_parser\" }\n+erg_compiler = { version = \"0.6.18\", path = \"./crates/erg_compiler\" }\n+els = { version = \"0.1.30\", path = \"./crates/els\" }\n \n [dependencies]\n erg_common = { workspace = true }\ndiff --git a/crates/els/Cargo.toml b/crates/els/Cargo.toml\nindex 3efbf4e..9f902fa 100644\n--- a/crates/els/Cargo.toml\n+++ b/crates/els/Cargo.toml\n@@ -2,7 +2,7 @@\n name = \"els\"\n description = \"An Erg compiler frontend for IDEs, implements LSP.\"\n documentation = \"http://docs.rs/els\"\n-version = \"0.1.30-nightly.2\"\n+version = \"0.1.30\"\n authors.workspace = true\n license.workspace = true\n edition.workspace = true\n", "diff --git a/src/Object/Merge.ts b/src/Object/Merge.ts\nindex 1f48efb..06caad1 100644\n--- a/src/Object/Merge.ts\n+++ b/src/Object/Merge.ts\n@@ -96,9 +96,11 @@ type ChooseMergeDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _MergeDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? 
MergeProp<O, O1, K, OOK, style>\n- : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? MergeProp<O, O1, K, OOK, style>\n+ : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\ndiff --git a/src/Object/Patch.ts b/src/Object/Patch.ts\nindex 2d73784..2c8bd42 100644\n--- a/src/Object/Patch.ts\n+++ b/src/Object/Patch.ts\n@@ -89,9 +89,11 @@ type ChoosePatchDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _PatchDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? PatchProp<O, O1, K, OOK>\n- : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? PatchProp<O, O1, K, OOK>\n+ : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\n", "diff --git a/docs/getting-started/getting-started.md b/docs/getting-started/getting-started.md\nindex dc6db37..3ef9d0a 100644\n--- a/docs/getting-started/getting-started.md\n+++ b/docs/getting-started/getting-started.md\n@@ -13,7 +13,7 @@ npm install -g @angular/cli\n **Using `npx`**\n \n ```bash\n-npx create-nx-workspace myworkspace\n+npx --ignore-existing create-nx-workspace myworkspace\n ```\n \n **Using `npm init`**\ndiff --git a/docs/guides/react-and-angular.md b/docs/guides/react-and-angular.md\nindex c1929a2..a5651ff 100644\n--- a/docs/guides/react-and-angular.md\n+++ b/docs/guides/react-and-angular.md\n@@ -11,7 +11,7 @@ To show how Nx does it, let's build two applications (one in Angular, and one in\n Let's start by creating a new Nx workspace. The easiest way to do this is to use npx.\n \n ```bash\n-npx create-nx-workspace happynrwl --preset=empty\n+npx --ignore-existing create-nx-workspace happynrwl --preset=empty\n ```\n \n ## Creating an Angular Application\ndiff --git a/docs/guides/react.md b/docs/guides/react.md\nindex e1647fd..eac848e 100644\n--- a/docs/guides/react.md\n+++ b/docs/guides/react.md\n@@ -16,13 +16,13 @@ Nx has first class support for React: you can create React applications and libr\n Create a new Nx workspace. 
The easiest way to do it is to use npx.\n \n ```bash\n-npx create-nx-workspace happynrwl --preset=empty\n+npx --ignore-existing create-nx-workspace happynrwl --preset=empty\n ```\n \n You can also create a workspace with a React application in place by running:\n \n ```bash\n-npx create-nx-workspace happynrwl --preset=react\n+npx --ignore-existing create-nx-workspace happynrwl --preset=react\n ```\n \n ## Generating a React Application\ndiff --git a/docs/tutorial/01-create-application.md b/docs/tutorial/01-create-application.md\nindex ea87ecf..967a56e 100644\n--- a/docs/tutorial/01-create-application.md\n+++ b/docs/tutorial/01-create-application.md\n@@ -7,7 +7,7 @@ In this tutorial you will use Nx to build a full-stack application out of common\n **Start by creating a new workspace.**\n \n ```bash\n-npx create-nx-workspace myorg\n+npx --ignore-existing create-nx-workspace myorg\n ```\n \n When asked about 'preset', select `empty`.\n", "diff --git a/packages/extension-horizontal-rule/src/horizontal-rule.ts b/packages/extension-horizontal-rule/src/horizontal-rule.ts\nindex 6f583e1..c905b63 100644\n--- a/packages/extension-horizontal-rule/src/horizontal-rule.ts\n+++ b/packages/extension-horizontal-rule/src/horizontal-rule.ts\n@@ -49,15 +49,14 @@ export const HorizontalRule = Node.create<HorizontalRuleOptions>({\n // set cursor after horizontal rule\n .command(({ tr, dispatch }) => {\n if (dispatch) {\n- const { parent, pos } = tr.selection.$from\n- const posAfter = pos + 1\n- const nodeAfter = tr.doc.nodeAt(posAfter)\n+ const { $to } = tr.selection\n+ const posAfter = $to.end()\n \n- if (nodeAfter) {\n- tr.setSelection(TextSelection.create(tr.doc, posAfter))\n+ if ($to.nodeAfter) {\n+ tr.setSelection(TextSelection.create(tr.doc, $to.pos))\n } else {\n // add node after horizontal rule if it\u2019s the end of the document\n- const node = parent.type.contentMatch.defaultType?.create()\n+ const node = $to.parent.type.contentMatch.defaultType?.create()\n \n if (node) {\n tr.insert(posAfter, node)\n"]
5
["34875bc0e59b43d9041903101c823d25ec194a21", "bb3e3d9b96e435c3b92fc208bca93d1ad7e1ad50", "f86944ff00b970d7e2da48abbff43e58bdf29b99", "fc9af4d0b93d69be4e201ffb18da04324e8a4a87", "34d80114704679118e9bb6058e0d6c7aa03fd4b5"]
["ci", "build", "refactor", "docs", "fix"]
Remove hasMany and belongsTo from context menu Signed-off-by: Pranav C <[email protected]>,add missing region to cloudformation_stack_set,Add the select function for LogicFlow,add prewatch script to core,correct code comment
["diff --git a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\nindex 5bc6f67..aaa297c 100644\n--- a/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n+++ b/packages/nc-gui/components/project/spreadsheet/rowsXcDataTable.vue\n@@ -261,37 +261,7 @@\n :size=\"size\"\n @input=\"loadTableData\"\n />\n- <!-- <v-pagination\n- v-if=\"count !== Infinity\"\n- style=\"max-width: 100%\"\n- v-model=\"page\"\n- :length=\"Math.ceil(count / size)\"\n- :total-visible=\"8\"\n- @input=\"loadTableData\"\n- color=\"primary lighten-2\"\n- ></v-pagination>\n- <div v-else class=\"mx-auto d-flex align-center mt-n1 \" style=\"max-width:250px\">\n- <span class=\"caption\" style=\"white-space: nowrap\"> Change page:</span>\n- <v-text-field\n- class=\"ml-1 caption\"\n- :full-width=\"false\"\n- outlined\n- dense\n- hide-details\n- v-model=\"page\"\n- @keydown.enter=\"loadTableData\"\n- type=\"number\"\n- >\n- <template #append>\n- <x-icon tooltip=\"Change page\" small icon.class=\"mt-1\" @click=\"loadTableData\">mdi-keyboard-return\n- </x-icon>\n- </template>\n- </v-text-field>\n- </div>-->\n </template>\n- <!-- <div v-else class=\"d-flex justify-center py-4\">-->\n- <!-- <v-alert type=\"info\" dense class=\"ma-1 flex-shrink-1\">Table is empty</v-alert>-->\n- <!-- </div>-->\n </div>\n \n <spreadsheet-nav-drawer\n@@ -414,9 +384,9 @@\n <span class=\"caption\">Delete Selected Rows</span>\n </v-list-item>\n </template>\n- <template v-if=\"meta.hasMany && meta.hasMany.length\">\n+ <!-- <template v-if=\"meta.hasMany && meta.hasMany.length\">\n <v-divider v-if=\"isEditable && !isLocked\" />\n- <span class=\"ml-3 grey--text \" style=\"font-size: 9px\">Has Many</span>\n+ <span class=\"ml-3 grey&#45;&#45;text \" style=\"font-size: 9px\">Has Many</span>\n \n <v-list-item v-for=\"(hm,i) in meta.hasMany\" :key=\"i\" @click=\"addNewRelationTabCtxMenu(hm,'hm')\">\n <span class=\"caption text-capitalize\">{{ hm._tn }}</span>\n@@ -425,12 +395,12 @@\n \n <template v-if=\"meta.belongsTo && meta.belongsTo.length\">\n <v-divider />\n- <span class=\"ml-3 grey--text \" style=\"font-size: 9px\">Belongs To</span>\n+ <span class=\"ml-3 grey&#45;&#45;text \" style=\"font-size: 9px\">Belongs To</span>\n \n <v-list-item v-for=\"(bt,i) in belongsTo\" :key=\"i\" @click=\"addNewRelationTabCtxMenu(bt,'bt')\">\n <span class=\"caption text-capitalize\">{{ bt._rtn }}</span>\n </v-list-item>\n- </template>\n+ </template>-->\n </v-list>\n </v-menu>\n <v-dialog\n", "diff --git a/internal/providers/terraform/aws/cloudformation_stack_set.go b/internal/providers/terraform/aws/cloudformation_stack_set.go\nindex 6720caa..e752b79 100644\n--- a/internal/providers/terraform/aws/cloudformation_stack_set.go\n+++ b/internal/providers/terraform/aws/cloudformation_stack_set.go\n@@ -12,7 +12,7 @@ func getCloudFormationStackSetRegistryItem() *schema.RegistryItem {\n \t}\n }\n func NewCloudformationStackSet(d *schema.ResourceData, u *schema.UsageData) *schema.Resource {\n-\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address)}\n+\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address), Region: strPtr(d.Get(\"region\").String())}\n \tif !d.IsEmpty(\"template_body\") {\n \t\tr.TemplateBody = strPtr(d.Get(\"template_body\").String())\n \t}\n", "diff --git a/packages/core/src/LogicFlow.tsx b/packages/core/src/LogicFlow.tsx\nindex 0d913b7..dcc59b3 100644\n--- a/packages/core/src/LogicFlow.tsx\n+++ b/packages/core/src/LogicFlow.tsx\n@@ -276,6 +276,12 @@ export 
default class LogicFlow {\n this.translate(-TRANSLATE_X, -TRANSLATE_Y);\n }\n /**\n+ * Select the element\n+ */\n+ select(id: string) {\n+ this.graphModel.selectElementById(id);\n+ }\n+ /**\n * Position the element at the center of the canvas\n * @param focusOnArgs accepts the element's current coordinates or its id; type distinguishes whether the id refers to a node or an edge; may also be omitted (as a fallback)\n */\ndiff --git a/packages/core/src/model/GraphModel.ts b/packages/core/src/model/GraphModel.ts\nindex 94d0899..10280a9 100644\n--- a/packages/core/src/model/GraphModel.ts\n+++ b/packages/core/src/model/GraphModel.ts\n@@ -481,6 +481,13 @@ class GraphModel {\n this.selectElement?.setSelected(true);\n }\n \n+ @action\n+ selectElementById(id: string) {\n+ this.selectElement?.setSelected(false);\n+ this.selectElement = this.getElement(id) as BaseNodeModel | BaseEdgeModel;\n+ this.selectElement?.setSelected(true);\n+ }\n+\n /* Change the edge type */\n @action\n changeEdgeType(type: string): void {\n", "diff --git a/.gitignore b/.gitignore\nindex 3445558..72257cb 100644\n--- a/.gitignore\n+++ b/.gitignore\n@@ -1,4 +1,5 @@\n *.o\n+.env\n settheory\n constraint\n Main\ndiff --git a/packages/core/package.json b/packages/core/package.json\nindex 9ba8b93..79bd532 100644\n--- a/packages/core/package.json\n+++ b/packages/core/package.json\n@@ -14,6 +14,7 @@\n \"build:parsers\": \"nearleyc src/parser/Domain.ne > src/parser/DomainParser.ts && nearleyc src/parser/Substance.ne > src/parser/SubstanceParser.ts && nearleyc src/parser/Style.ne > src/parser/StyleParser.ts\",\n \"prebuild\": \"yarn build:parsers\",\n \"prestart\": \"yarn build:parsers\",\n+ \"prewatch\": \"yarn build:parsers\",\n \"test\": \"jest --watchAll=false\",\n \"test:watch\": \"jest --watchAll\",\n \"build\": \"rollup -c\",\n", "diff --git a/server/src/db.rs b/server/src/db.rs\nindex bfc5e17..0fb4d55 100644\n--- a/server/src/db.rs\n+++ b/server/src/db.rs\n@@ -389,7 +389,7 @@ impl Db {\n let partition = LockableCatalogPartition::new(Arc::clone(&self), partition);\n \n // Do lock dance to get a write lock on the partition as well\n- // as on all of the chunks\n+ // as on the to-be-dropped chunk.\n let partition = partition.read();\n \n let chunk = self.lockable_chunk(table_name, partition_key, chunk_id)?;\n"]
5
["7dbbb64c45506ef634180638db800b6d9535523d", "304d0588f634e9e72087a706367c53af9c7f7180", "6ae067153cd2608018fd3da76bd6d00a08da4b3a", "aa0152baa4376b1087c86499a7c289b668d5ad55", "cccdd8a43fea7614f78b6f1dcf1765100928a3db"]
["refactor", "fix", "feat", "build", "docs"]