diff --git "a/novel-translation/09_tune-lf-medium-py3.11.ipynb" "b/novel-translation/09_tune-lf-medium-py3.11.ipynb" --- "a/novel-translation/09_tune-lf-medium-py3.11.ipynb" +++ "b/novel-translation/09_tune-lf-medium-py3.11.ipynb" @@ -304,7 +304,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Fri Jul 5 08:59:30 2024 \r\n+---------------------------------------------------------------------------------------+\r\n| NVIDIA-SMI 535.54.03 Driver Version: 535.54.03 CUDA Version: 12.2 |\r\n|-----------------------------------------+----------------------+----------------------+\r\n| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\r\n| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\r\n| | | MIG M. |\r\n|=========================================+======================+======================|\r\n| 0 Tesla T4 Off | 00000001:00:00.0 Off | 0 |\r\n| N/A 31C P8 9W / 70W | 2MiB / 15360MiB | 0% Default |\r\n| | | N/A |\r\n+-----------------------------------------+----------------------+----------------------+\r\n \r\n+---------------------------------------------------------------------------------------+\r\n| Processes: |\r\n| GPU GI CI PID Type Process name GPU Memory |\r\n| ID ID Usage |\r\n|=======================================================================================|\r\n| No running processes found |\r\n+---------------------------------------------------------------------------------------+\r\n" + "Sat Jul 6 05:25:48 2024 \r\n+---------------------------------------------------------------------------------------+\r\n| NVIDIA-SMI 535.54.03 Driver Version: 535.54.03 CUDA Version: 12.2 |\r\n|-----------------------------------------+----------------------+----------------------+\r\n| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\r\n| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\r\n| | | MIG M. 
|\r\n|=========================================+======================+======================|\r\n| 0 Tesla T4 Off | 00000001:00:00.0 Off | 0 |\r\n| N/A 63C P8 11W / 70W | 2MiB / 15360MiB | 0% Default |\r\n| | | N/A |\r\n+-----------------------------------------+----------------------+----------------------+\r\n \r\n+---------------------------------------------------------------------------------------+\r\n| Processes: |\r\n| GPU GI CI PID Type Process name GPU Memory |\r\n| ID ID Usage |\r\n|=======================================================================================|\r\n| No running processes found |\r\n+---------------------------------------------------------------------------------------+\r\n" ] } ], @@ -333,7 +333,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Python 3.11.0rc1\r\nName: flash-attn\nVersion: 2.5.9.post1\nSummary: Flash Attention: Fast and Memory-Efficient Exact Attention\nHome-page: https://github.com/Dao-AILab/flash-attention\nAuthor: Tri Dao\nAuthor-email: trid@cs.stanford.edu\nLicense: \nLocation: /local_disk0/.ephemeral_nfs/envs/pythonEnv-40f92d71-6c52-44a3-a1ef-62cdea633f68/lib/python3.11/site-packages\nRequires: einops, torch\nRequired-by: \nCPU times: user 10.5 ms, sys: 15.1 ms, total: 25.6 ms\nWall time: 4.37 s\n" + "Python 3.11.0rc1\r\nName: flash-attn\nVersion: 2.5.9.post1\nSummary: Flash Attention: Fast and Memory-Efficient Exact Attention\nHome-page: https://github.com/Dao-AILab/flash-attention\nAuthor: Tri Dao\nAuthor-email: trid@cs.stanford.edu\nLicense: \nLocation: /local_disk0/.ephemeral_nfs/envs/pythonEnv-40f92d71-6c52-44a3-a1ef-62cdea633f68/lib/python3.11/site-packages\nRequires: einops, torch\nRequired-by: \nCPU times: user 10.1 ms, sys: 14.5 ms, total: 24.6 ms\nWall time: 4.25 s\n" ] } ], @@ -364,7 +364,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Current Directory:\r\n/Workspace/Users/donghao.huang@mastercard.com/llm-finetuning/llama-factory\r\nconfig/llama3_8b_lora_sft.yaml:\r\n {\r\n \"model_name_or_path\": \"gradientai/Llama-3-8B-Instruct-Gradient-1048k\",\r\n \"stage\": \"sft\",\r\n \"do_train\": true,\r\n \"finetuning_type\": \"lora\",\r\n \"lora_target\": \"all\",\r\n \"quantization_bit\": 4,\r\n \"loraplus_lr_ratio\": 16.0,\r\n \"dataset\": \"alpaca_mac\",\r\n \"template\": \"llama3\",\r\n \"cutoff_len\": 1024,\r\n \"max_samples\": 4528,\r\n \"overwrite_cache\": true,\r\n \"preprocessing_num_workers\": 16,\r\n \"output_dir\": \"/Workspace/Users/donghao.huang@mastercard.com/lf-saves/llama3-8b/lora/sft/\",\r\n \"logging_steps\": 10,\r\n \"save_steps\": 560,\r\n \"plot_loss\": true,\r\n \"overwrite_output_dir\": true,\r\n \"per_device_train_batch_size\": 1,\r\n \"gradient_accumulation_steps\": 8,\r\n \"learning_rate\": 0.0001,\r\n \"num_train_epochs\": 6.0,\r\n \"lr_scheduler_type\": \"cosine\",\r\n \"warmup_ratio\": 0.1,\r\n \"bf16\": true,\r\n \"ddp_timeout\": 180000000,\r\n \"val_size\": 0.01,\r\n \"per_device_eval_batch_size\": 1,\r\n \"eval_strategy\": \"steps\",\r\n \"eval_steps\": 560,\r\n \"report_to\": \"none\"\r\n}\r\n2024-07-05 08:59:40.790008: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\r\nTo enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\r\n[2024-07-05 08:59:50,632] [INFO] [real_accelerator.py:191:get_accelerator] Setting ds_accelerator to cuda (auto detect)\r\n07/05/2024 08:59:58 - WARNING - 
llamafactory.hparams.parser - We recommend enable `upcast_layernorm` in quantized training.\r\n07/05/2024 08:59:58 - INFO - llamafactory.hparams.parser - Process rank: 0, device: cuda:0, n_gpu: 1, distributed training: False, compute dtype: torch.bfloat16\r\n[INFO|tokenization_utils_base.py:2161] 2024-07-05 08:59:59,223 >> loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/tokenizer.json\r\n[INFO|tokenization_utils_base.py:2161] 2024-07-05 08:59:59,223 >> loading file added_tokens.json from cache at None\r\n[INFO|tokenization_utils_base.py:2161] 2024-07-05 08:59:59,223 >> loading file special_tokens_map.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/special_tokens_map.json\r\n[INFO|tokenization_utils_base.py:2161] 2024-07-05 08:59:59,223 >> loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/tokenizer_config.json\r\n[WARNING|logging.py:313] 2024-07-05 08:59:59,517 >> Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\r\n07/05/2024 08:59:59 - INFO - llamafactory.data.template - Replace eos token: <|eot_id|>\r\n07/05/2024 08:59:59 - INFO - llamafactory.data.template - Add pad token: <|eot_id|>\r\n07/05/2024 08:59:59 - INFO - llamafactory.data.loader - Loading dataset alpaca_mac.json...\r\n\rConverting format of dataset (num_proc=16): 0%| | 0/4528 [00:00<|start_header_id|>user<|end_header_id|>\r\n\r\nPlease translate the following Chinese text into English and provide only the translated content, nothing else.\r\n全仗着狐仙搭救。<|eot_id|><|start_header_id|>assistant<|end_header_id|>\r\n\r\nBecause I was protected by a fox fairy.<|eot_id|>\r\nlabel_ids:\r\n[-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 18433, 358, 574, 2682, 555, 264, 39935, 45586, 13, 128009]\r\nlabels:\r\nBecause I was protected by a fox fairy.<|eot_id|>\r\n[INFO|configuration_utils.py:733] 2024-07-05 09:00:03,851 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/config.json\r\n[INFO|configuration_utils.py:800] 2024-07-05 09:00:03,852 >> Model config LlamaConfig {\r\n \"_name_or_path\": \"gradientai/Llama-3-8B-Instruct-Gradient-1048k\",\r\n \"architectures\": [\r\n \"LlamaForCausalLM\"\r\n ],\r\n \"attention_bias\": false,\r\n \"attention_dropout\": 0.0,\r\n \"bos_token_id\": 128000,\r\n \"eos_token_id\": 128001,\r\n \"hidden_act\": \"silu\",\r\n \"hidden_size\": 4096,\r\n \"initializer_range\": 0.02,\r\n \"intermediate_size\": 14336,\r\n \"max_position_embeddings\": 1048576,\r\n \"mlp_bias\": false,\r\n \"model_type\": \"llama\",\r\n \"num_attention_heads\": 32,\r\n \"num_hidden_layers\": 32,\r\n \"num_key_value_heads\": 8,\r\n \"pretraining_tp\": 1,\r\n \"rms_norm_eps\": 1e-05,\r\n \"rope_scaling\": null,\r\n \"rope_theta\": 3580165449.0,\r\n \"tie_word_embeddings\": false,\r\n \"torch_dtype\": \"bfloat16\",\r\n \"transformers_version\": \"4.42.3\",\r\n \"use_cache\": true,\r\n \"vocab_size\": 
128256\r\n}\r\n\r\n07/05/2024 09:00:03 - INFO - llamafactory.model.model_utils.quantization - Quantizing model to 4 bit with bitsandbytes.\r\n[INFO|modeling_utils.py:3556] 2024-07-05 09:00:03,878 >> loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/model.safetensors.index.json\r\n[INFO|modeling_utils.py:1531] 2024-07-05 09:00:03,880 >> Instantiating LlamaForCausalLM model under default dtype torch.bfloat16.\r\n[INFO|configuration_utils.py:1000] 2024-07-05 09:00:03,881 >> Generate config GenerationConfig {\r\n \"bos_token_id\": 128000,\r\n \"eos_token_id\": 128001\r\n}\r\n\r\n\rLoading checkpoint shards: 0%| | 0/4 [00:00> All model checkpoint weights were used when initializing LlamaForCausalLM.\r\n\r\n[INFO|modeling_utils.py:4372] 2024-07-05 09:00:08,031 >> All the weights of LlamaForCausalLM were initialized from the model checkpoint at gradientai/Llama-3-8B-Instruct-Gradient-1048k.\r\nIf your task is similar to the task the model of the checkpoint was trained on, you can already use LlamaForCausalLM for predictions without further training.\r\n[INFO|configuration_utils.py:955] 2024-07-05 09:00:08,056 >> loading configuration file generation_config.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/generation_config.json\r\n[INFO|configuration_utils.py:1000] 2024-07-05 09:00:08,057 >> Generate config GenerationConfig {\r\n \"bos_token_id\": 128000,\r\n \"do_sample\": true,\r\n \"eos_token_id\": [\r\n 128001,\r\n 128009\r\n ],\r\n \"max_length\": 4096,\r\n \"temperature\": 0.6,\r\n \"top_p\": 0.9\r\n}\r\n\r\n07/05/2024 09:00:08 - INFO - llamafactory.model.model_utils.checkpointing - Gradient checkpointing enabled.\r\n07/05/2024 09:00:08 - INFO - llamafactory.model.model_utils.attention - Using torch SDPA for faster training and inference.\r\n07/05/2024 09:00:08 - INFO - llamafactory.model.adapter - Upcasting trainable params to float32.\r\n07/05/2024 09:00:08 - INFO - llamafactory.model.adapter - Fine-tuning method: LoRA\r\n07/05/2024 09:00:08 - INFO - llamafactory.model.model_utils.misc - Found linear modules: gate_proj,v_proj,o_proj,k_proj,up_proj,down_proj,q_proj\r\n07/05/2024 09:00:08 - INFO - llamafactory.model.loader - trainable params: 20,971,520 || all params: 8,051,232,768 || trainable%: 0.2605\r\n[INFO|trainer.py:642] 2024-07-05 09:00:09,078 >> Using auto half precision backend\r\ntraining_args.resume_from_checkpoint: None\r\n07/05/2024 09:00:09 - INFO - llamafactory.train.trainer_utils - Using LoRA+ optimizer with loraplus lr ratio 16.00.\r\n[INFO|trainer.py:2128] 2024-07-05 09:00:09,496 >> ***** Running training *****\r\n[INFO|trainer.py:2129] 2024-07-05 09:00:09,496 >> Num examples = 4,482\r\n[INFO|trainer.py:2130] 2024-07-05 09:00:09,496 >> Num Epochs = 6\r\n[INFO|trainer.py:2131] 2024-07-05 09:00:09,496 >> Instantaneous batch size per device = 1\r\n[INFO|trainer.py:2134] 2024-07-05 09:00:09,496 >> Total train batch size (w. 
parallel, distributed & accumulation) = 8\r\n[INFO|trainer.py:2135] 2024-07-05 09:00:09,496 >> Gradient Accumulation steps = 8\r\n[INFO|trainer.py:2136] 2024-07-05 09:00:09,496 >> Total optimization steps = 3,360\r\n[INFO|trainer.py:2137] 2024-07-05 09:00:09,500 >> Number of trainable parameters = 20,971,520\r\n\r 0%| | 0/3360 [00:00> loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/tokenizer.json\r\n[INFO|tokenization_utils_base.py:2161] 2024-07-06 05:26:16,583 >> loading file added_tokens.json from cache at None\r\n[INFO|tokenization_utils_base.py:2161] 2024-07-06 05:26:16,583 >> loading file special_tokens_map.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/special_tokens_map.json\r\n[INFO|tokenization_utils_base.py:2161] 2024-07-06 05:26:16,583 >> loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/tokenizer_config.json\r\n[WARNING|logging.py:313] 2024-07-06 05:26:16,885 >> Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\r\n07/06/2024 05:26:16 - INFO - llamafactory.data.template - Replace eos token: <|eot_id|>\r\n07/06/2024 05:26:16 - INFO - llamafactory.data.template - Add pad token: <|eot_id|>\r\n07/06/2024 05:26:17 - INFO - llamafactory.data.loader - Loading dataset alpaca_mac.json...\r\n\rConverting format of dataset (num_proc=16): 0%| | 0/4528 [00:00<|start_header_id|>user<|end_header_id|>\r\n\r\nPlease translate the following Chinese text into English and provide only the translated content, nothing else.\r\n全仗着狐仙搭救。<|eot_id|><|start_header_id|>assistant<|end_header_id|>\r\n\r\nBecause I was protected by a fox fairy.<|eot_id|>\r\nlabel_ids:\r\n[-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 18433, 358, 574, 2682, 555, 264, 39935, 45586, 13, 128009]\r\nlabels:\r\nBecause I was protected by a fox fairy.<|eot_id|>\r\n[INFO|configuration_utils.py:733] 2024-07-06 05:26:21,331 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/config.json\r\n[INFO|configuration_utils.py:800] 2024-07-06 05:26:21,332 >> Model config LlamaConfig {\r\n \"_name_or_path\": \"gradientai/Llama-3-8B-Instruct-Gradient-1048k\",\r\n \"architectures\": [\r\n \"LlamaForCausalLM\"\r\n ],\r\n \"attention_bias\": false,\r\n \"attention_dropout\": 0.0,\r\n \"bos_token_id\": 128000,\r\n \"eos_token_id\": 128001,\r\n \"hidden_act\": \"silu\",\r\n \"hidden_size\": 4096,\r\n \"initializer_range\": 0.02,\r\n \"intermediate_size\": 14336,\r\n \"max_position_embeddings\": 1048576,\r\n \"mlp_bias\": false,\r\n \"model_type\": \"llama\",\r\n \"num_attention_heads\": 32,\r\n \"num_hidden_layers\": 32,\r\n \"num_key_value_heads\": 8,\r\n \"pretraining_tp\": 1,\r\n \"rms_norm_eps\": 1e-05,\r\n \"rope_scaling\": null,\r\n \"rope_theta\": 3580165449.0,\r\n \"tie_word_embeddings\": false,\r\n \"torch_dtype\": \"bfloat16\",\r\n \"transformers_version\": \"4.42.3\",\r\n \"use_cache\": true,\r\n 
\"vocab_size\": 128256\r\n}\r\n\r\n07/06/2024 05:26:21 - INFO - llamafactory.model.model_utils.quantization - Quantizing model to 4 bit with bitsandbytes.\r\n[INFO|modeling_utils.py:3556] 2024-07-06 05:26:21,358 >> loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/model.safetensors.index.json\r\n[INFO|modeling_utils.py:1531] 2024-07-06 05:26:21,360 >> Instantiating LlamaForCausalLM model under default dtype torch.bfloat16.\r\n[INFO|configuration_utils.py:1000] 2024-07-06 05:26:21,361 >> Generate config GenerationConfig {\r\n \"bos_token_id\": 128000,\r\n \"eos_token_id\": 128001\r\n}\r\n\r\n\rLoading checkpoint shards: 0%| | 0/4 [00:00> All model checkpoint weights were used when initializing LlamaForCausalLM.\r\n\r\n[INFO|modeling_utils.py:4372] 2024-07-06 05:26:25,528 >> All the weights of LlamaForCausalLM were initialized from the model checkpoint at gradientai/Llama-3-8B-Instruct-Gradient-1048k.\r\nIf your task is similar to the task the model of the checkpoint was trained on, you can already use LlamaForCausalLM for predictions without further training.\r\n[INFO|configuration_utils.py:955] 2024-07-06 05:26:25,553 >> loading configuration file generation_config.json from cache at /root/.cache/huggingface/hub/models--gradientai--Llama-3-8B-Instruct-Gradient-1048k/snapshots/8697fb25cb77c852311e03b4464b8467471d56a4/generation_config.json\r\n[INFO|configuration_utils.py:1000] 2024-07-06 05:26:25,553 >> Generate config GenerationConfig {\r\n \"bos_token_id\": 128000,\r\n \"do_sample\": true,\r\n \"eos_token_id\": [\r\n 128001,\r\n 128009\r\n ],\r\n \"max_length\": 4096,\r\n \"temperature\": 0.6,\r\n \"top_p\": 0.9\r\n}\r\n\r\n07/06/2024 05:26:25 - INFO - llamafactory.model.model_utils.checkpointing - Gradient checkpointing enabled.\r\n07/06/2024 05:26:25 - INFO - llamafactory.model.model_utils.attention - Using torch SDPA for faster training and inference.\r\n07/06/2024 05:26:25 - INFO - llamafactory.model.adapter - Upcasting trainable params to float32.\r\n07/06/2024 05:26:25 - INFO - llamafactory.model.adapter - Fine-tuning method: LoRA\r\n07/06/2024 05:26:25 - INFO - llamafactory.model.model_utils.misc - Found linear modules: gate_proj,v_proj,o_proj,k_proj,up_proj,down_proj,q_proj\r\n07/06/2024 05:26:26 - INFO - llamafactory.model.loader - trainable params: 20,971,520 || all params: 8,051,232,768 || trainable%: 0.2605\r\n[INFO|trainer.py:642] 2024-07-06 05:26:26,145 >> Using auto half precision backend\r\n07/06/2024 05:26:26 - WARNING - llamafactory.train.callbacks - Previous trainer log in this folder will be deleted.\r\ntraining_args.resume_from_checkpoint: None\r\n07/06/2024 05:26:26 - INFO - llamafactory.train.trainer_utils - Using LoRA+ optimizer with loraplus lr ratio 16.00.\r\n[INFO|trainer.py:2128] 2024-07-06 05:26:26,821 >> ***** Running training *****\r\n[INFO|trainer.py:2129] 2024-07-06 05:26:26,821 >> Num examples = 4,482\r\n[INFO|trainer.py:2130] 2024-07-06 05:26:26,821 >> Num Epochs = 6\r\n[INFO|trainer.py:2131] 2024-07-06 05:26:26,821 >> Instantaneous batch size per device = 1\r\n[INFO|trainer.py:2134] 2024-07-06 05:26:26,821 >> Total train batch size (w. 
parallel, distributed & accumulation) = 8\r\n[INFO|trainer.py:2135] 2024-07-06 05:26:26,821 >> Gradient Accumulation steps = 8\r\n[INFO|trainer.py:2136] 2024-07-06 05:26:26,821 >> Total optimization steps = 3,360\r\n[INFO|trainer.py:2137] 2024-07-06 05:26:26,825 >> Number of trainable parameters = 20,971,520\r\n\r 0%| | 0/3360 [00:00\n", - " .ansiout {\n", - " display: block;\n", - " unicode-bidi: embed;\n", - " white-space: pre-wrap;\n", - " word-wrap: break-word;\n", - " word-break: break-all;\n", - " font-family: \"Menlo\", \"Monaco\", \"Consolas\", \"Ubuntu Mono\", \"Source Code Pro\", monospace;\n", - " font-size: 13px;\n", - " color: #555;\n", - " margin-left: 4px;\n", - " line-height: 19px;\n", - " }\n", - "" + "text/plain": [ + "com.databricks.backend.common.rpc.CommandCancelledException\n", + "\tat com.databricks.spark.chauffeur.ExecContextState.cancel(ExecContextState.scala:429)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.cancelExecution(ChauffeurState.scala:1225)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.$anonfun$process$1(ChauffeurState.scala:958)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:525)\n", + "\tat com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)\n", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)\n", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)\n", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.withAttributionContext(ChauffeurState.scala:67)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:76)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.withAttributionTags(ChauffeurState.scala:67)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:624)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:534)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.recordOperationWithResultTags(ChauffeurState.scala:67)\n", + "\tat com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:526)\n", + "\tat com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:494)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.recordOperation(ChauffeurState.scala:67)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.process(ChauffeurState.scala:914)\n", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.handleDriverRequest$1(Chauffeur.scala:679)\n", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.$anonfun$applyOrElse$5(Chauffeur.scala:705)\n", + "\tat 
com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)\n", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)\n", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)\n", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)\n", + "\tat com.databricks.rpc.ServerBackend.withAttributionContext(ServerBackend.scala:22)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:76)\n", + "\tat com.databricks.rpc.ServerBackend.withAttributionTags(ServerBackend.scala:22)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:624)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:534)\n", + "\tat com.databricks.rpc.ServerBackend.recordOperationWithResultTags(ServerBackend.scala:22)\n", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.handleDriverRequestWithUsageLogging$1(Chauffeur.scala:704)\n", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.applyOrElse(Chauffeur.scala:759)\n", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.applyOrElse(Chauffeur.scala:552)\n", + "\tat com.databricks.rpc.ServerBackend.$anonfun$internalReceive0$2(ServerBackend.scala:174)\n", + "\tat com.databricks.rpc.ServerBackend$$anonfun$commonReceive$1.applyOrElse(ServerBackend.scala:200)\n", + "\tat com.databricks.rpc.ServerBackend$$anonfun$commonReceive$1.applyOrElse(ServerBackend.scala:200)\n", + "\tat com.databricks.rpc.ServerBackend.internalReceive0(ServerBackend.scala:171)\n", + "\tat com.databricks.rpc.ServerBackend.$anonfun$internalReceive$1(ServerBackend.scala:147)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:525)\n", + "\tat com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)\n", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)\n", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)\n", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)\n", + "\tat com.databricks.rpc.ServerBackend.withAttributionContext(ServerBackend.scala:22)\n", + "\tat 
com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:76)\n", + "\tat com.databricks.rpc.ServerBackend.withAttributionTags(ServerBackend.scala:22)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:624)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:534)\n", + "\tat com.databricks.rpc.ServerBackend.recordOperationWithResultTags(ServerBackend.scala:22)\n", + "\tat com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:526)\n", + "\tat com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:494)\n", + "\tat com.databricks.rpc.ServerBackend.recordOperation(ServerBackend.scala:22)\n", + "\tat com.databricks.rpc.ServerBackend.internalReceive(ServerBackend.scala:146)\n", + "\tat com.databricks.rpc.JettyServer$RequestManager.handleRPC(JettyServer.scala:1021)\n", + "\tat com.databricks.rpc.JettyServer$RequestManager.handleRequestAndRespond(JettyServer.scala:942)\n", + "\tat com.databricks.rpc.JettyServer$RequestManager.$anonfun$handleHttp$6(JettyServer.scala:546)\n", + "\tat com.databricks.rpc.JettyServer$RequestManager.$anonfun$handleHttp$6$adapted(JettyServer.scala:515)\n", + "\tat com.databricks.logging.activity.ActivityContextFactory$.$anonfun$withActivityInternal$6(ActivityContextFactory.scala:546)\n", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)\n", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)\n", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)\n", + "\tat com.databricks.logging.activity.ActivityContextFactory$.withAttributionContext(ActivityContextFactory.scala:57)\n", + "\tat com.databricks.logging.activity.ActivityContextFactory$.$anonfun$withActivityInternal$3(ActivityContextFactory.scala:546)\n", + "\tat com.databricks.context.integrity.IntegrityCheckContext$ThreadLocalStorage$.withValue(IntegrityCheckContext.scala:72)\n", + "\tat com.databricks.logging.activity.ActivityContextFactory$.withActivityInternal(ActivityContextFactory.scala:524)\n", + "\tat com.databricks.logging.activity.ActivityContextFactory$.withServiceRequestActivity(ActivityContextFactory.scala:178)\n", + "\tat com.databricks.rpc.JettyServer$RequestManager.handleHttp(JettyServer.scala:515)\n", + "\tat com.databricks.rpc.JettyServer$RequestManager.doPost(JettyServer.scala:405)\n", + "\tat javax.servlet.http.HttpServlet.service(HttpServlet.java:665)\n", + "\tat com.databricks.rpc.HttpServletWithPatch.service(HttpServletWithPatch.scala:33)\n", + "\tat javax.servlet.http.HttpServlet.service(HttpServlet.java:750)\n", + "\tat org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:799)\n", + "\tat org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:554)\n", + "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:190)\n", + "\tat 
org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:505)\n", + "\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)\n", + "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)\n", + "\tat org.eclipse.jetty.server.Server.handle(Server.java:516)\n", + "\tat org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:487)\n", + "\tat org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:732)\n", + "\tat org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:479)\n", + "\tat org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)\n", + "\tat org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)\n", + "\tat org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105)\n", + "\tat org.eclipse.jetty.io.ssl.SslConnection$DecryptedEndPoint.onFillable(SslConnection.java:555)\n", + "\tat org.eclipse.jetty.io.ssl.SslConnection.onFillable(SslConnection.java:410)\n", + "\tat org.eclipse.jetty.io.ssl.SslConnection$2.succeeded(SslConnection.java:164)\n", + "\tat org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105)\n", + "\tat org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)\n", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:338)\n", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:315)\n", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:173)\n", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:131)\n", + "\tat org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:409)\n", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool$$anon$1.$anonfun$run$2(InstrumentedQueuedThreadPool.scala:106)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)\n", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)\n", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)\n", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool.withAttributionContext(InstrumentedQueuedThreadPool.scala:46)\n", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool$$anon$1.$anonfun$run$1(InstrumentedQueuedThreadPool.scala:106)\n", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)\n", + "\tat com.databricks.instrumentation.QueuedThreadPoolInstrumenter.trackActiveThreads(QueuedThreadPoolInstrumenter.scala:150)\n", + "\tat com.databricks.instrumentation.QueuedThreadPoolInstrumenter.trackActiveThreads$(QueuedThreadPoolInstrumenter.scala:147)\n", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool.trackActiveThreads(InstrumentedQueuedThreadPool.scala:46)\n", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool$$anon$1.run(InstrumentedQueuedThreadPool.scala:88)\n", + "\tat 
org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883)\n", + "\tat org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034)\n", + "\tat java.lang.Thread.run(Thread.java:750)" ] }, "metadata": { "application/vnd.databricks.v1+output": { + "addedWidgets": {}, "arguments": {}, - "data": "", - "errorSummary": "", - "errorTraceType": null, - "metadata": {}, - "type": "ipynbError" + "datasetInfos": [], + "jupyterProps": null, + "metadata": { + "errorSummary": "Cancelled" + }, + "removedWidgets": [], + "sqlProps": null, + "stackFrames": [ + "com.databricks.backend.common.rpc.CommandCancelledException", + "\tat com.databricks.spark.chauffeur.ExecContextState.cancel(ExecContextState.scala:429)", + "\tat com.databricks.spark.chauffeur.ChauffeurState.cancelExecution(ChauffeurState.scala:1225)", + "\tat com.databricks.spark.chauffeur.ChauffeurState.$anonfun$process$1(ChauffeurState.scala:958)", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:525)", + "\tat com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)", + "\tat com.databricks.spark.chauffeur.ChauffeurState.withAttributionContext(ChauffeurState.scala:67)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:76)", + "\tat com.databricks.spark.chauffeur.ChauffeurState.withAttributionTags(ChauffeurState.scala:67)", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:624)", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:534)", + "\tat com.databricks.spark.chauffeur.ChauffeurState.recordOperationWithResultTags(ChauffeurState.scala:67)", + "\tat com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:526)", + "\tat com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:494)", + "\tat com.databricks.spark.chauffeur.ChauffeurState.recordOperation(ChauffeurState.scala:67)", + "\tat com.databricks.spark.chauffeur.ChauffeurState.process(ChauffeurState.scala:914)", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.handleDriverRequest$1(Chauffeur.scala:679)", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.$anonfun$applyOrElse$5(Chauffeur.scala:705)", + "\tat com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)", + "\tat 
com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)", + "\tat com.databricks.rpc.ServerBackend.withAttributionContext(ServerBackend.scala:22)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:76)", + "\tat com.databricks.rpc.ServerBackend.withAttributionTags(ServerBackend.scala:22)", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:624)", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:534)", + "\tat com.databricks.rpc.ServerBackend.recordOperationWithResultTags(ServerBackend.scala:22)", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.handleDriverRequestWithUsageLogging$1(Chauffeur.scala:704)", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.applyOrElse(Chauffeur.scala:759)", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.applyOrElse(Chauffeur.scala:552)", + "\tat com.databricks.rpc.ServerBackend.$anonfun$internalReceive0$2(ServerBackend.scala:174)", + "\tat com.databricks.rpc.ServerBackend$$anonfun$commonReceive$1.applyOrElse(ServerBackend.scala:200)", + "\tat com.databricks.rpc.ServerBackend$$anonfun$commonReceive$1.applyOrElse(ServerBackend.scala:200)", + "\tat com.databricks.rpc.ServerBackend.internalReceive0(ServerBackend.scala:171)", + "\tat com.databricks.rpc.ServerBackend.$anonfun$internalReceive$1(ServerBackend.scala:147)", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:525)", + "\tat com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)", + "\tat com.databricks.rpc.ServerBackend.withAttributionContext(ServerBackend.scala:22)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:76)", + "\tat com.databricks.rpc.ServerBackend.withAttributionTags(ServerBackend.scala:22)", + "\tat 
com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:624)", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:534)", + "\tat com.databricks.rpc.ServerBackend.recordOperationWithResultTags(ServerBackend.scala:22)", + "\tat com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:526)", + "\tat com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:494)", + "\tat com.databricks.rpc.ServerBackend.recordOperation(ServerBackend.scala:22)", + "\tat com.databricks.rpc.ServerBackend.internalReceive(ServerBackend.scala:146)", + "\tat com.databricks.rpc.JettyServer$RequestManager.handleRPC(JettyServer.scala:1021)", + "\tat com.databricks.rpc.JettyServer$RequestManager.handleRequestAndRespond(JettyServer.scala:942)", + "\tat com.databricks.rpc.JettyServer$RequestManager.$anonfun$handleHttp$6(JettyServer.scala:546)", + "\tat com.databricks.rpc.JettyServer$RequestManager.$anonfun$handleHttp$6$adapted(JettyServer.scala:515)", + "\tat com.databricks.logging.activity.ActivityContextFactory$.$anonfun$withActivityInternal$6(ActivityContextFactory.scala:546)", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)", + "\tat com.databricks.logging.activity.ActivityContextFactory$.withAttributionContext(ActivityContextFactory.scala:57)", + "\tat com.databricks.logging.activity.ActivityContextFactory$.$anonfun$withActivityInternal$3(ActivityContextFactory.scala:546)", + "\tat com.databricks.context.integrity.IntegrityCheckContext$ThreadLocalStorage$.withValue(IntegrityCheckContext.scala:72)", + "\tat com.databricks.logging.activity.ActivityContextFactory$.withActivityInternal(ActivityContextFactory.scala:524)", + "\tat com.databricks.logging.activity.ActivityContextFactory$.withServiceRequestActivity(ActivityContextFactory.scala:178)", + "\tat com.databricks.rpc.JettyServer$RequestManager.handleHttp(JettyServer.scala:515)", + "\tat com.databricks.rpc.JettyServer$RequestManager.doPost(JettyServer.scala:405)", + "\tat javax.servlet.http.HttpServlet.service(HttpServlet.java:665)", + "\tat com.databricks.rpc.HttpServletWithPatch.service(HttpServletWithPatch.scala:33)", + "\tat javax.servlet.http.HttpServlet.service(HttpServlet.java:750)", + "\tat org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:799)", + "\tat org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:554)", + "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:190)", + "\tat org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:505)", + "\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)", + "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)", + "\tat org.eclipse.jetty.server.Server.handle(Server.java:516)", + "\tat org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:487)", + "\tat 
org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:732)", + "\tat org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:479)", + "\tat org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)", + "\tat org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)", + "\tat org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105)", + "\tat org.eclipse.jetty.io.ssl.SslConnection$DecryptedEndPoint.onFillable(SslConnection.java:555)", + "\tat org.eclipse.jetty.io.ssl.SslConnection.onFillable(SslConnection.java:410)", + "\tat org.eclipse.jetty.io.ssl.SslConnection$2.succeeded(SslConnection.java:164)", + "\tat org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105)", + "\tat org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:338)", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:315)", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:173)", + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:131)", + "\tat org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:409)", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool$$anon$1.$anonfun$run$2(InstrumentedQueuedThreadPool.scala:106)", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool.withAttributionContext(InstrumentedQueuedThreadPool.scala:46)", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool$$anon$1.$anonfun$run$1(InstrumentedQueuedThreadPool.scala:106)", + "\tat scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)", + "\tat com.databricks.instrumentation.QueuedThreadPoolInstrumenter.trackActiveThreads(QueuedThreadPoolInstrumenter.scala:150)", + "\tat com.databricks.instrumentation.QueuedThreadPoolInstrumenter.trackActiveThreads$(QueuedThreadPoolInstrumenter.scala:147)", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool.trackActiveThreads(InstrumentedQueuedThreadPool.scala:46)", + "\tat com.databricks.rpc.InstrumentedQueuedThreadPool$$anon$1.run(InstrumentedQueuedThreadPool.scala:88)", + "\tat org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883)", + "\tat org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034)", + "\tat java.lang.Thread.run(Thread.java:750)" + ], + "type": "baseError" } }, "output_type": "display_data" @@ -464,31 +710,285 @@ { "output_type": "display_data", "data": { - "text/html": [ - "" + "text/plain": [ + "com.databricks.backend.common.rpc.CommandCancelledException\n", + "\tat com.databricks.spark.chauffeur.ExecContextState.cancel(ExecContextState.scala:429)\n", + 
"\tat com.databricks.spark.chauffeur.ChauffeurState.cancelExecution(ChauffeurState.scala:1225)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.$anonfun$process$1(ChauffeurState.scala:958)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:525)\n", + "\tat com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)\n", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)\n", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)\n", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.withAttributionContext(ChauffeurState.scala:67)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionTags$(AttributionContextTracing.scala:76)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.withAttributionTags(ChauffeurState.scala:67)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:624)\n", + "\tat com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:534)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.recordOperationWithResultTags(ChauffeurState.scala:67)\n", + "\tat com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:526)\n", + "\tat com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:494)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.recordOperation(ChauffeurState.scala:67)\n", + "\tat com.databricks.spark.chauffeur.ChauffeurState.process(ChauffeurState.scala:914)\n", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.handleDriverRequest$1(Chauffeur.scala:679)\n", + "\tat com.databricks.spark.chauffeur.Chauffeur$$anon$1$$anonfun$receive$1.$anonfun$applyOrElse$5(Chauffeur.scala:705)\n", + "\tat com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:629)\n", + "\tat com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:647)\n", + "\tat com.databricks.logging.AttributionContextTracing.$anonfun$withAttributionContext$1(AttributionContextTracing.scala:48)\n", + "\tat com.databricks.logging.AttributionContext$.$anonfun$withValue$1(AttributionContext.scala:244)\n", + "\tat scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)\n", + "\tat com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:240)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext(AttributionContextTracing.scala:46)\n", + "\tat com.databricks.logging.AttributionContextTracing.withAttributionContext$(AttributionContextTracing.scala:43)\n", + "\tat com.databricks.rpc.ServerBackend.withAttributionContext(ServerBackend.scala:22)\n", + "\tat 
com.databricks.logging.AttributionContextTracing.withAttributionTags(AttributionContextTracing.scala:95)\n",
+      "[... remaining Databricks chauffeur / UsageLogging / Jetty stack frames omitted; the output records only that the command was cancelled ...]"
      ]
     },
     "metadata": {
      "application/vnd.databricks.v1+output": {
+      "addedWidgets": {},
       "arguments": {},
-      "data": "",
-      "errorSummary": "",
-      "errorTraceType": null,
-      "metadata": {},
-      "type": "ipynbError"
+      "datasetInfos": [],
+      "jupyterProps": null,
+      "metadata": {
+       "errorSummary": "Cancelled"
+      },
+      "removedWidgets": [],
+      "sqlProps": null,
+      "stackFrames": [
+       "com.databricks.backend.common.rpc.CommandCancelledException",
+       "\tat com.databricks.spark.chauffeur.ExecContextState.cancel(ExecContextState.scala:429)",
+       "[... remaining stack frames omitted ...]"
+      ],
+      "type": "baseError"
      }
     },
     "output_type": "display_data"